language
stringclasses 6
values | original_string
stringlengths 25
887k
| text
stringlengths 25
887k
|
---|---|---|
Python | def run(self):
"""Override the base class run() method"""
while True:
with self.m_cv:
self.m_cv.wait_for(lambda: self.m_command is not None)
if self.m_command == Playlist.EXIT:
self._stop_playing()
return
if self.m_command == Playlist.STOP_PLAYING:
self._stop_playing()
if self.m_command == Playlist.NEW_PLAYLIST:
self._play(self.m_args)
self.m_command = None
self.m_args = None | def run(self):
"""Override the base class run() method"""
while True:
with self.m_cv:
self.m_cv.wait_for(lambda: self.m_command is not None)
if self.m_command == Playlist.EXIT:
self._stop_playing()
return
if self.m_command == Playlist.STOP_PLAYING:
self._stop_playing()
if self.m_command == Playlist.NEW_PLAYLIST:
self._play(self.m_args)
self.m_command = None
self.m_args = None |
Python | def play(self, playlist):
"""Tell the player to play a new playlist
This method can be called from any thread. If the player is already playing
something it is interrupted
Args:
playlist str what to play
"""
with self.m_cv:
self.m_args = playlist
self.m_command = Playlist.NEW_PLAYLIST
self.m_cv.notify() | def play(self, playlist):
"""Tell the player to play a new playlist
This method can be called from any thread. If the player is already playing
something it is interrupted
Args:
playlist str what to play
"""
with self.m_cv:
self.m_args = playlist
self.m_command = Playlist.NEW_PLAYLIST
self.m_cv.notify() |
Python | def stop_playing(self):
"""Tell the player to stop playing
This method can be called from any thread. If the player is not already playing
nothing happens
"""
with self.m_cv:
self.m_command = Playlist.STOP_PLAYING
self.m_cv.notify() | def stop_playing(self):
"""Tell the player to stop playing
This method can be called from any thread. If the player is not already playing
nothing happens
"""
with self.m_cv:
self.m_command = Playlist.STOP_PLAYING
self.m_cv.notify() |
Python | def exit(self):
"""Tell the player to stop playing and exit the controlling thread.
This method can be called from any thread. After calling exit no more commands
are listened to.
"""
with self.m_cv:
self.m_command = Playlist.EXIT
self.m_cv.notify() | def exit(self):
"""Tell the player to stop playing and exit the controlling thread.
This method can be called from any thread. After calling exit no more commands
are listened to.
"""
with self.m_cv:
self.m_command = Playlist.EXIT
self.m_cv.notify() |
Python | def _stop_playing(self):
"""Implementation of stopping the player process
Only called by Playlist.run()
"""
if self.m_player:
self.m_player.send_signal(Playlist.SIG_INT)
self.m_player = None | def _stop_playing(self):
"""Implementation of stopping the player process
Only called by Playlist.run()
"""
if self.m_player:
self.m_player.send_signal(Playlist.SIG_INT)
self.m_player = None |
Python | def _play(self, playlist):
"""Implementation of making a player process
Only called by Playlist.run()
Args:
playlist the directory name the player uses as a list
"""
self._stop_playing()
argv = [self.m_player_command, '-a', playlist]
self.m_player = subprocess.Popen(argv, stderr=subprocess.DEVNULL) | def _play(self, playlist):
"""Implementation of making a player process
Only called by Playlist.run()
Args:
playlist the directory name the player uses as a list
"""
self._stop_playing()
argv = [self.m_player_command, '-a', playlist]
self.m_player = subprocess.Popen(argv, stderr=subprocess.DEVNULL) |
Python | def parse_flags(args: object) -> object:
"""
Parses the input flags for a producer
"""
parser.add_argument('-in', help='tool results file')
parser.add_argument('-out', help='producer output file')
return parser.parse_args(args) | def parse_flags(args: object) -> object:
"""
Parses the input flags for a producer
"""
parser.add_argument('-in', help='tool results file')
parser.add_argument('-out', help='producer output file')
return parser.parse_args(args) |
Python | def parse_in_file_json(args: object) -> dict:
"""
A generic method to return a tool's JSON results file as a dict
"""
results_file = vars(args)['in']
with open(results_file) as f:
data = f.read()
return json.loads(data) | def parse_in_file_json(args: object) -> dict:
"""
A generic method to return a tool's JSON results file as a dict
"""
results_file = vars(args)['in']
with open(results_file) as f:
data = f.read()
return json.loads(data) |
Python | def write_dracon_out(args: object, tool_name: str, issues: [issue_pb2.Issue]):
"""
A method to write the resulting protobuf to the output file
"""
out_file = vars(args)['out']
source = __get_meta_source()
clean_issues = []
for iss in issues:
iss.description = iss.description.replace(__source_dir, ".")
iss.title = iss.title.replace(__source_dir, ".")
iss.target = iss.target.replace(__source_dir, ".")
iss.source = source
clean_issues.append(iss)
ltr = engine_pb2.LaunchToolResponse(
tool_name=tool_name,
issues=issues
)
with open(out_file, 'ab') as f:
f.write(ltr.SerializeToString()) | def write_dracon_out(args: object, tool_name: str, issues: [issue_pb2.Issue]):
"""
A method to write the resulting protobuf to the output file
"""
out_file = vars(args)['out']
source = __get_meta_source()
clean_issues = []
for iss in issues:
iss.description = iss.description.replace(__source_dir, ".")
iss.title = iss.title.replace(__source_dir, ".")
iss.target = iss.target.replace(__source_dir, ".")
iss.source = source
clean_issues.append(iss)
ltr = engine_pb2.LaunchToolResponse(
tool_name=tool_name,
issues=issues
)
with open(out_file, 'ab') as f:
f.write(ltr.SerializeToString()) |
Python | def __get_meta_source() -> str:
"""
This obtains the source address in the __meta_src_file from the source workspace
"""
meta_src_path = os.path.join(__source_dir, __meta_src_file)
if os.path.exists(meta_src_path):
with open(meta_src_path) as f:
return f.read().strip()
return "unknown" | def __get_meta_source() -> str:
"""
This obtains the source address in the __meta_src_file from the source workspace
"""
meta_src_path = os.path.join(__source_dir, __meta_src_file)
if os.path.exists(meta_src_path):
with open(meta_src_path) as f:
return f.read().strip()
return "unknown" |
Python | def upload_file_to_container(blob_service_client, container_name, file_path):
"""
Uploads a local file to an Azure Blob storage container.
:param block_blob_client: A blob service client.
:type block_blob_client: `azure.storage.blob.BlockBlobService`
:param str container_name: The name of the Azure Blob storage container.
:param str file_path: The local path to the file.
:rtype: `azure.batch.models.ResourceFile`
:return: A ResourceFile initialized with a SAS URL appropriate for Batch
tasks.
"""
blob_name = os.path.basename(file_path)
print('Uploading file {} to container [{}]...'.format(file_path,
container_name))
blob_client = blob_service_client.get_blob_client(container_name,blob_name)
with open(file_path, "rb") as data:
blob_client.upload_blob(data,overwrite=True)
#container_client = blob_service_client.get_container_client(container_name)
sas_token=generate_blob_sas(
account_name=blob_client.account_name,
container_name=blob_client.container_name,
blob_name=blob_name,
permission=BlobSasPermissions(read=True),
account_key=config._STORAGE_KEY,
expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=2)
)
sas_url = blob_client.url + '?' + sas_token
return batchmodels.ResourceFile(http_url=sas_url, file_path=blob_name) | def upload_file_to_container(blob_service_client, container_name, file_path):
"""
Uploads a local file to an Azure Blob storage container.
:param block_blob_client: A blob service client.
:type block_blob_client: `azure.storage.blob.BlockBlobService`
:param str container_name: The name of the Azure Blob storage container.
:param str file_path: The local path to the file.
:rtype: `azure.batch.models.ResourceFile`
:return: A ResourceFile initialized with a SAS URL appropriate for Batch
tasks.
"""
blob_name = os.path.basename(file_path)
print('Uploading file {} to container [{}]...'.format(file_path,
container_name))
blob_client = blob_service_client.get_blob_client(container_name,blob_name)
with open(file_path, "rb") as data:
blob_client.upload_blob(data,overwrite=True)
#container_client = blob_service_client.get_container_client(container_name)
sas_token=generate_blob_sas(
account_name=blob_client.account_name,
container_name=blob_client.container_name,
blob_name=blob_name,
permission=BlobSasPermissions(read=True),
account_key=config._STORAGE_KEY,
expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=2)
)
sas_url = blob_client.url + '?' + sas_token
return batchmodels.ResourceFile(http_url=sas_url, file_path=blob_name) |
Python | def read_global_config() -> dict[str, Any]:
"""Read global config and return as JSON."""
with open("config/global_config.json", encoding="UTF-8") as f:
config = json.loads(f.read())
return config | def read_global_config() -> dict[str, Any]:
"""Read global config and return as JSON."""
with open("config/global_config.json", encoding="UTF-8") as f:
config = json.loads(f.read())
return config |
Python | def draw_gradient(img):
'''
Fills the image with a gradient of 16 levels
of grayscale.
'''
for i in range(16):
color = i*0x10
box = (
i*img.width//16, # xmin
0, # ymin
(i+1)*img.width//16, # xmax
img.height # ymax
)
img.paste(color, box=box) | def draw_gradient(img):
'''
Fills the image with a gradient of 16 levels
of grayscale.
'''
for i in range(16):
color = i*0x10
box = (
i*img.width//16, # xmin
0, # ymin
(i+1)*img.width//16, # xmax
img.height # ymax
)
img.paste(color, box=box) |
Python | def load_img_area(self, buf, rotate_mode=constants.Rotate.NONE, xy=None, dims=None, pixel_format=None):
'''
Write the pixel data in buf (an array of bytes, 1 per pixel) to device memory.
This function does not actually display the image (see EPD.display_area).
Parameters
----------
buf : bytes
An array of bytes containing the pixel data
rotate_mode : constants.Rotate, optional
A rotation mode for the data to be pasted into device memory
xy : (int, int), optional
The x,y coordinates of the top-left corner of the area being pasted. If omitted,
the image is assumed to be the whole display area.
dims : (int, int), optional
The dimensions of the area being pasted. If xy is omitted (or set to None), the
dimensions are assumed to be the dimensions of the display area.
'''
endian_type = constants.EndianTypes.BIG
if pixel_format is None:
pixel_format = constants.PixelModes.M_4BPP
if xy is None:
self._load_img_start(endian_type, pixel_format, rotate_mode)
else:
self._load_img_area_start(endian_type, pixel_format, rotate_mode, xy, dims)
try:
bpp = {
PixelModes.M_2BPP : 2,
PixelModes.M_4BPP : 4,
PixelModes.M_8BPP : 8,
}[pixel_format]
except KeyError:
raise ValueError("invalid pixel format") from None
self.spi.pack_and_write_pixels(buf, bpp)
self._load_img_end() | def load_img_area(self, buf, rotate_mode=constants.Rotate.NONE, xy=None, dims=None, pixel_format=None):
'''
Write the pixel data in buf (an array of bytes, 1 per pixel) to device memory.
This function does not actually display the image (see EPD.display_area).
Parameters
----------
buf : bytes
An array of bytes containing the pixel data
rotate_mode : constants.Rotate, optional
A rotation mode for the data to be pasted into device memory
xy : (int, int), optional
The x,y coordinates of the top-left corner of the area being pasted. If omitted,
the image is assumed to be the whole display area.
dims : (int, int), optional
The dimensions of the area being pasted. If xy is omitted (or set to None), the
dimensions are assumed to be the dimensions of the display area.
'''
endian_type = constants.EndianTypes.BIG
if pixel_format is None:
pixel_format = constants.PixelModes.M_4BPP
if xy is None:
self._load_img_start(endian_type, pixel_format, rotate_mode)
else:
self._load_img_area_start(endian_type, pixel_format, rotate_mode, xy, dims)
try:
bpp = {
PixelModes.M_2BPP : 2,
PixelModes.M_4BPP : 4,
PixelModes.M_8BPP : 8,
}[pixel_format]
except KeyError:
raise ValueError("invalid pixel format") from None
self.spi.pack_and_write_pixels(buf, bpp)
self._load_img_end() |
Python | def display_area(self, xy, dims, display_mode):
'''
Update a portion of the display to whatever is currently stored in device memory
for that region. Updated data can be written to device memory using EPD.write_img_area
'''
self.spi.write_cmd(Commands.DPY_AREA, xy[0], xy[1], dims[0], dims[1], display_mode) | def display_area(self, xy, dims, display_mode):
'''
Update a portion of the display to whatever is currently stored in device memory
for that region. Updated data can be written to device memory using EPD.write_img_area
'''
self.spi.write_cmd(Commands.DPY_AREA, xy[0], xy[1], dims[0], dims[1], display_mode) |
Python | def update_system_info(self):
'''
Get information about the system, and store it in class attributes
'''
self.spi.write_cmd(Commands.GET_DEV_INFO)
data = self.spi.read_data(20)
if all(x == 0 for x in data):
raise RuntimeError("communication with device failed")
self.width = data[0]
self.height = data[1]
self.img_buf_address = data[3] << 16 | data[2]
self.firmware_version = ''.join([chr(x>>8)+chr(x&0xFF) for x in data[4:12]])
self.lut_version = ''.join([chr(x>>8)+chr(x&0xFF) for x in data[12:20]]) | def update_system_info(self):
'''
Get information about the system, and store it in class attributes
'''
self.spi.write_cmd(Commands.GET_DEV_INFO)
data = self.spi.read_data(20)
if all(x == 0 for x in data):
raise RuntimeError("communication with device failed")
self.width = data[0]
self.height = data[1]
self.img_buf_address = data[3] << 16 | data[2]
self.firmware_version = ''.join([chr(x>>8)+chr(x&0xFF) for x in data[4:12]])
self.lut_version = ''.join([chr(x>>8)+chr(x&0xFF) for x in data[12:20]]) |
Python | def place_text(img, text, x, y):
'''
Place some text on the image
'''
fontsize = 20
draw = ImageDraw.Draw(img)
font = ImageFont.truetype("/usr/share/fonts/truetype/freefont/FreeMono.ttf", fontsize)
draw.text((x, y), text, font=font) | def place_text(img, text, x, y):
'''
Place some text on the image
'''
fontsize = 20
draw = ImageDraw.Draw(img)
font = ImageFont.truetype("/usr/share/fonts/truetype/freefont/FreeMono.ttf", fontsize)
draw.text((x, y), text, font=font) |
Python | def _get_frame_buf(self):
'''
Return the frame buf, rotated according to flip. Always returns a copy, even
when rotate is None.
'''
if self._rotate_method is None:
return self.frame_buf.copy()
return self.frame_buf.transpose(self._rotate_method) | def _get_frame_buf(self):
'''
Return the frame buf, rotated according to flip. Always returns a copy, even
when rotate is None.
'''
if self._rotate_method is None:
return self.frame_buf.copy()
return self.frame_buf.transpose(self._rotate_method) |
Python | def draw_full(self, mode):
'''
Write the full image to the device, and display it using mode
'''
frame = self._get_frame_buf()
self.update(frame.tobytes(), (0,0), self.display_dims, mode)
if self.track_gray:
if mode == DisplayModes.DU:
diff_box = self._compute_diff_box(self.prev_frame, frame, round_to=8)
self.gray_change_bbox = self._merge_bbox(self.gray_change_bbox, diff_box)
else:
self.gray_change_bbox = None
self.prev_frame = frame | def draw_full(self, mode):
'''
Write the full image to the device, and display it using mode
'''
frame = self._get_frame_buf()
self.update(frame.tobytes(), (0,0), self.display_dims, mode)
if self.track_gray:
if mode == DisplayModes.DU:
diff_box = self._compute_diff_box(self.prev_frame, frame, round_to=8)
self.gray_change_bbox = self._merge_bbox(self.gray_change_bbox, diff_box)
else:
self.gray_change_bbox = None
self.prev_frame = frame |
Python | def draw_partial(self, mode):
'''
Write only the rectangle bounding the pixels of the image that have changed
since the last call to draw_full or draw_partial
'''
if self.prev_frame is None: # first call since initialization
self.draw_full(mode)
if mode in low_bpp_modes:
round_box = 8
else:
round_box = 4
frame = self._get_frame_buf()
# compute diff for this frame
diff_box = self._compute_diff_box(self.prev_frame, frame, round_to=round_box)
if self.track_gray:
self.gray_change_bbox = self._merge_bbox(self.gray_change_bbox, diff_box)
# reset grayscale changes to zero
if mode != DisplayModes.DU:
diff_box = self._round_bbox(self.gray_change_bbox, round_to=round_box)
self.gray_change_bbox = None
# if it is, nothing to do
if diff_box is not None:
buf = frame.crop(diff_box)
# if we are using a black/white only mode, any pixels that changed should be
# converted to black/white
if mode == DisplayModes.DU:
img_manip.make_changes_bw(frame.crop(diff_box), buf)
xy = (diff_box[0], diff_box[1])
dims = (diff_box[2]-diff_box[0], diff_box[3]-diff_box[1])
self.update(buf.tobytes(), xy, dims, mode)
self.prev_frame = frame | def draw_partial(self, mode):
'''
Write only the rectangle bounding the pixels of the image that have changed
since the last call to draw_full or draw_partial
'''
if self.prev_frame is None: # first call since initialization
self.draw_full(mode)
if mode in low_bpp_modes:
round_box = 8
else:
round_box = 4
frame = self._get_frame_buf()
# compute diff for this frame
diff_box = self._compute_diff_box(self.prev_frame, frame, round_to=round_box)
if self.track_gray:
self.gray_change_bbox = self._merge_bbox(self.gray_change_bbox, diff_box)
# reset grayscale changes to zero
if mode != DisplayModes.DU:
diff_box = self._round_bbox(self.gray_change_bbox, round_to=round_box)
self.gray_change_bbox = None
# if it is, nothing to do
if diff_box is not None:
buf = frame.crop(diff_box)
# if we are using a black/white only mode, any pixels that changed should be
# converted to black/white
if mode == DisplayModes.DU:
img_manip.make_changes_bw(frame.crop(diff_box), buf)
xy = (diff_box[0], diff_box[1])
dims = (diff_box[2]-diff_box[0], diff_box[3]-diff_box[1])
self.update(buf.tobytes(), xy, dims, mode)
self.prev_frame = frame |
Python | def clear(self):
'''
Clear display, device image buffer, and frame buffer (e.g. at startup)
'''
# set frame buffer to all white
self.frame_buf.paste(0xFF, box=(0, 0, self.width, self.height))
self.draw_full(DisplayModes.INIT) | def clear(self):
'''
Clear display, device image buffer, and frame buffer (e.g. at startup)
'''
# set frame buffer to all white
self.frame_buf.paste(0xFF, box=(0, 0, self.width, self.height))
self.draw_full(DisplayModes.INIT) |
Python | def _compute_diff_box(cls, a, b, round_to=2):
'''
Find the four coordinates giving the bounding box of differences between a and b
making sure they are divisible by round_to
Parameters
----------
a : PIL.Image
The first image
b : PIL.Image
The second image
round_to : int
The multiple to align the bbox to
'''
box = ImageChops.difference(a, b).getbbox()
if box is None:
return None
return cls._round_bbox(box, round_to) | def _compute_diff_box(cls, a, b, round_to=2):
'''
Find the four coordinates giving the bounding box of differences between a and b
making sure they are divisible by round_to
Parameters
----------
a : PIL.Image
The first image
b : PIL.Image
The second image
round_to : int
The multiple to align the bbox to
'''
box = ImageChops.difference(a, b).getbbox()
if box is None:
return None
return cls._round_bbox(box, round_to) |
Python | def _round_bbox(box, round_to=4):
'''
Round a bounding box so the edges are divisible by round_to
'''
minx, miny, maxx, maxy = box
minx -= minx%round_to
maxx += round_to-1 - (maxx-1)%round_to
miny -= miny%round_to
maxy += round_to-1 - (maxy-1)%round_to
return (minx, miny, maxx, maxy) | def _round_bbox(box, round_to=4):
'''
Round a bounding box so the edges are divisible by round_to
'''
minx, miny, maxx, maxy = box
minx -= minx%round_to
maxx += round_to-1 - (maxx-1)%round_to
miny -= miny%round_to
maxy += round_to-1 - (maxy-1)%round_to
return (minx, miny, maxx, maxy) |
Python | def _merge_bbox(a, b):
'''
Return a bounding box that contains both bboxes a and b
'''
if a is None:
return b
if b is None:
return a
minx = min(a[0], b[0])
miny = min(a[1], b[1])
maxx = max(a[2], b[2])
maxy = max(a[3], b[3])
return (minx, miny, maxx, maxy) | def _merge_bbox(a, b):
'''
Return a bounding box that contains both bboxes a and b
'''
if a is None:
return b
if b is None:
return a
minx = min(a[0], b[0])
miny = min(a[1], b[1])
maxx = max(a[2], b[2])
maxy = max(a[3], b[3])
return (minx, miny, maxx, maxy) |
Python | def _place_text(img, text, x_offset=0, y_offset=0):
'''
Put some centered text at a location on the image.
'''
fontsize = 80
draw = ImageDraw.Draw(img)
try:
font = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', fontsize)
except OSError:
font = ImageFont.truetype('/usr/share/fonts/TTF/DejaVuSans.ttf', fontsize)
img_width, img_height = img.size
text_width, _ = font.getsize(text)
text_height = fontsize
draw_x = (img_width - text_width)//2 + x_offset
draw_y = (img_height - text_height)//2 + y_offset
draw.text((draw_x, draw_y), text, font=font) | def _place_text(img, text, x_offset=0, y_offset=0):
'''
Put some centered text at a location on the image.
'''
fontsize = 80
draw = ImageDraw.Draw(img)
try:
font = ImageFont.truetype('/usr/share/fonts/truetype/freefont/FreeSans.ttf', fontsize)
except OSError:
font = ImageFont.truetype('/usr/share/fonts/TTF/DejaVuSans.ttf', fontsize)
img_width, img_height = img.size
text_width, _ = font.getsize(text)
text_height = fontsize
draw_x = (img_width - text_width)//2 + x_offset
draw_y = (img_height - text_height)//2 + y_offset
draw.text((draw_x, draw_y), text, font=font) |
Python | def find_first_repeated_freq(input_freqs, current_freq=0):
"""finds which frequency is reached twice first
the current frequency is found as the cummulative sum, the input frequencies
can be read more than once with the starting frequency being the last cummulative
frequency on the previous pass."""
found = False
set_of_sums = {current_freq}
while not found:
for freq in input_freqs:
current_freq += freq
if current_freq in set_of_sums:
found = True
break
set_of_sums.add(current_freq)
return current_freq | def find_first_repeated_freq(input_freqs, current_freq=0):
"""finds which frequency is reached twice first
the current frequency is found as the cummulative sum, the input frequencies
can be read more than once with the starting frequency being the last cummulative
frequency on the previous pass."""
found = False
set_of_sums = {current_freq}
while not found:
for freq in input_freqs:
current_freq += freq
if current_freq in set_of_sums:
found = True
break
set_of_sums.add(current_freq)
return current_freq |
Python | def values_generator(min_value: int = -32767, max_value: int = 32767):
"""Function to generate values for tests"""
for op in ops:
mkdir(op)
for elements_quantity in (500, 10**3, 10**4, 10**5, 5 * (10**5)):
with open(f'{op}/data({elements_quantity}).txt', "a") as inp:
for i in range(elements_quantity):
value = randint(min_value, max_value)
inp.write(str(value) + " ")
inp.write("\n") | def values_generator(min_value: int = -32767, max_value: int = 32767):
"""Function to generate values for tests"""
for op in ops:
mkdir(op)
for elements_quantity in (500, 10**3, 10**4, 10**5, 5 * (10**5)):
with open(f'{op}/data({elements_quantity}).txt', "a") as inp:
for i in range(elements_quantity):
value = randint(min_value, max_value)
inp.write(str(value) + " ")
inp.write("\n") |
Python | def PixelsToRaster(self, px, py, zoom):
"Move the origin of pixel coordinates to top-left corner"
mapSize = self.tileSize << zoom
return px, mapSize - py | def PixelsToRaster(self, px, py, zoom):
"Move the origin of pixel coordinates to top-left corner"
mapSize = self.tileSize << zoom
return px, mapSize - py |
Python | def TileBounds(self, tx, ty, zoom):
"Returns bounds of the given tile in EPSG:3857 coordinates"
minx, miny = self.PixelsToMeters(tx * self.tileSize, ty * self.tileSize, zoom)
maxx, maxy = self.PixelsToMeters((tx + 1) * self.tileSize, (ty + 1) * self.tileSize, zoom)
return (minx, miny, maxx, maxy) | def TileBounds(self, tx, ty, zoom):
"Returns bounds of the given tile in EPSG:3857 coordinates"
minx, miny = self.PixelsToMeters(tx * self.tileSize, ty * self.tileSize, zoom)
maxx, maxy = self.PixelsToMeters((tx + 1) * self.tileSize, (ty + 1) * self.tileSize, zoom)
return (minx, miny, maxx, maxy) |
Python | def TileLatLonBounds(self, tx, ty, zoom):
"Returns bounds of the given tile in latitude/longitude using WGS84 datum"
bounds = self.TileBounds(tx, ty, zoom)
minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1])
maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3])
return (minLat, minLon, maxLat, maxLon) | def TileLatLonBounds(self, tx, ty, zoom):
"Returns bounds of the given tile in latitude/longitude using WGS84 datum"
bounds = self.TileBounds(tx, ty, zoom)
minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1])
maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3])
return (minLat, minLon, maxLat, maxLon) |
Python | def ZoomForPixelSize(self, pixelSize):
"Maximal scaledown zoom of the pyramid closest to the pixelSize."
for i in range(MAXZOOMLEVEL):
if pixelSize > self.Resolution(i):
if i != -1:
return i - 1
else:
return 0 | def ZoomForPixelSize(self, pixelSize):
"Maximal scaledown zoom of the pyramid closest to the pixelSize."
for i in range(MAXZOOMLEVEL):
if pixelSize > self.Resolution(i):
if i != -1:
return i - 1
else:
return 0 |
Python | def QuadTree(self, tx, ty, zoom):
"Converts TMS tile coordinates to Microsoft QuadTree"
quadKey = ""
ty = (2**zoom - 1) - ty
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i - 1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quadKey += str(digit)
return quadKey | def QuadTree(self, tx, ty, zoom):
"Converts TMS tile coordinates to Microsoft QuadTree"
quadKey = ""
ty = (2**zoom - 1) - ty
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i - 1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quadKey += str(digit)
return quadKey |
Python | def LonLatToPixels(self, lon, lat, zoom):
"Converts lon/lat to pixel coordinates in given zoom of the EPSG:4326 pyramid"
res = self.resFact / 2**zoom
px = (180 + lon) / res
py = (90 + lat) / res
return px, py | def LonLatToPixels(self, lon, lat, zoom):
"Converts lon/lat to pixel coordinates in given zoom of the EPSG:4326 pyramid"
res = self.resFact / 2**zoom
px = (180 + lon) / res
py = (90 + lat) / res
return px, py |
Python | def TileBounds(self, tx, ty, zoom):
"Returns bounds of the given tile"
res = self.resFact / 2**zoom
return (
tx * self.tileSize * res - 180,
ty * self.tileSize * res - 90,
(tx + 1) * self.tileSize * res - 180,
(ty + 1) * self.tileSize * res - 90
) | def TileBounds(self, tx, ty, zoom):
"Returns bounds of the given tile"
res = self.resFact / 2**zoom
return (
tx * self.tileSize * res - 180,
ty * self.tileSize * res - 90,
(tx + 1) * self.tileSize * res - 180,
(ty + 1) * self.tileSize * res - 90
) |
Python | def tilefilename(self, x, y, z):
"""Returns filename for tile with given coordinates"""
tileIndex = x + y * self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z]
return os.path.join("TileGroup%.0f" % math.floor(tileIndex / self.tilesize),
"%s-%s-%s.%s" % (z, x, y, self.tileformat)) | def tilefilename(self, x, y, z):
"""Returns filename for tile with given coordinates"""
tileIndex = x + y * self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z]
return os.path.join("TileGroup%.0f" % math.floor(tileIndex / self.tilesize),
"%s-%s-%s.%s" % (z, x, y, self.tileformat)) |
Python | def generate_kml(tx, ty, tz, tileext, tilesize, tileswne, options, children=None, **args):
"""
Template for the KML. Returns filled string.
"""
if not children:
children = []
args['tx'], args['ty'], args['tz'] = tx, ty, tz
args['tileformat'] = tileext
if 'tilesize' not in args:
args['tilesize'] = tilesize
if 'minlodpixels' not in args:
args['minlodpixels'] = int(args['tilesize'] / 2)
if 'maxlodpixels' not in args:
args['maxlodpixels'] = int(args['tilesize'] * 8)
if children == []:
args['maxlodpixels'] = -1
if tx is None:
tilekml = False
args['title'] = options.title
else:
tilekml = True
args['title'] = "%d/%d/%d.kml" % (tz, tx, ty)
args['south'], args['west'], args['north'], args['east'] = tileswne(tx, ty, tz)
if tx == 0:
args['drawOrder'] = 2 * tz + 1
elif tx is not None:
args['drawOrder'] = 2 * tz
else:
args['drawOrder'] = 0
url = options.url
if not url:
if tilekml:
url = "../../"
else:
url = ""
s = """<?xml version="1.0" encoding="utf-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>%(title)s</name>
<description></description>
<Style>
<ListStyle id="hideChildren">
<listItemType>checkHideChildren</listItemType>
</ListStyle>
</Style>""" % args
if tilekml:
s += """
<Region>
<LatLonAltBox>
<north>%(north).14f</north>
<south>%(south).14f</south>
<east>%(east).14f</east>
<west>%(west).14f</west>
</LatLonAltBox>
<Lod>
<minLodPixels>%(minlodpixels)d</minLodPixels>
<maxLodPixels>%(maxlodpixels)d</maxLodPixels>
</Lod>
</Region>
<GroundOverlay>
<drawOrder>%(drawOrder)d</drawOrder>
<Icon>
<href>%(ty)d.%(tileformat)s</href>
</Icon>
<LatLonBox>
<north>%(north).14f</north>
<south>%(south).14f</south>
<east>%(east).14f</east>
<west>%(west).14f</west>
</LatLonBox>
</GroundOverlay>
""" % args
for cx, cy, cz in children:
csouth, cwest, cnorth, ceast = tileswne(cx, cy, cz)
s += """
<NetworkLink>
<name>%d/%d/%d.%s</name>
<Region>
<LatLonAltBox>
<north>%.14f</north>
<south>%.14f</south>
<east>%.14f</east>
<west>%.14f</west>
</LatLonAltBox>
<Lod>
<minLodPixels>%d</minLodPixels>
<maxLodPixels>-1</maxLodPixels>
</Lod>
</Region>
<Link>
<href>%s%d/%d/%d.kml</href>
<viewRefreshMode>onRegion</viewRefreshMode>
<viewFormat/>
</Link>
</NetworkLink>
""" % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest,
args['minlodpixels'], url, cz, cx, cy)
s += """ </Document>
</kml>
"""
return s | def generate_kml(tx, ty, tz, tileext, tilesize, tileswne, options, children=None, **args):
"""
Template for the KML. Returns filled string.
"""
if not children:
children = []
args['tx'], args['ty'], args['tz'] = tx, ty, tz
args['tileformat'] = tileext
if 'tilesize' not in args:
args['tilesize'] = tilesize
if 'minlodpixels' not in args:
args['minlodpixels'] = int(args['tilesize'] / 2)
if 'maxlodpixels' not in args:
args['maxlodpixels'] = int(args['tilesize'] * 8)
if children == []:
args['maxlodpixels'] = -1
if tx is None:
tilekml = False
args['title'] = options.title
else:
tilekml = True
args['title'] = "%d/%d/%d.kml" % (tz, tx, ty)
args['south'], args['west'], args['north'], args['east'] = tileswne(tx, ty, tz)
if tx == 0:
args['drawOrder'] = 2 * tz + 1
elif tx is not None:
args['drawOrder'] = 2 * tz
else:
args['drawOrder'] = 0
url = options.url
if not url:
if tilekml:
url = "../../"
else:
url = ""
s = """<?xml version="1.0" encoding="utf-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2">
<Document>
<name>%(title)s</name>
<description></description>
<Style>
<ListStyle id="hideChildren">
<listItemType>checkHideChildren</listItemType>
</ListStyle>
</Style>""" % args
if tilekml:
s += """
<Region>
<LatLonAltBox>
<north>%(north).14f</north>
<south>%(south).14f</south>
<east>%(east).14f</east>
<west>%(west).14f</west>
</LatLonAltBox>
<Lod>
<minLodPixels>%(minlodpixels)d</minLodPixels>
<maxLodPixels>%(maxlodpixels)d</maxLodPixels>
</Lod>
</Region>
<GroundOverlay>
<drawOrder>%(drawOrder)d</drawOrder>
<Icon>
<href>%(ty)d.%(tileformat)s</href>
</Icon>
<LatLonBox>
<north>%(north).14f</north>
<south>%(south).14f</south>
<east>%(east).14f</east>
<west>%(west).14f</west>
</LatLonBox>
</GroundOverlay>
""" % args
for cx, cy, cz in children:
csouth, cwest, cnorth, ceast = tileswne(cx, cy, cz)
s += """
<NetworkLink>
<name>%d/%d/%d.%s</name>
<Region>
<LatLonAltBox>
<north>%.14f</north>
<south>%.14f</south>
<east>%.14f</east>
<west>%.14f</west>
</LatLonAltBox>
<Lod>
<minLodPixels>%d</minLodPixels>
<maxLodPixels>-1</maxLodPixels>
</Lod>
</Region>
<Link>
<href>%s%d/%d/%d.kml</href>
<viewRefreshMode>onRegion</viewRefreshMode>
<viewFormat/>
</Link>
</NetworkLink>
""" % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest,
args['minlodpixels'], url, cz, cx, cy)
s += """ </Document>
</kml>
"""
return s |
Python | def scale_query_to_tile(dsquery, dstile, tiledriver, options, tilefilename=''):
"""Scales down query dataset to the tile dataset"""
querysize = dsquery.RasterXSize
tilesize = dstile.RasterXSize
tilebands = dstile.RasterCount
if options.resampling == 'average':
# Function: gdal.RegenerateOverview()
for i in range(1, tilebands + 1):
# Black border around NODATA
res = gdal.RegenerateOverview(dsquery.GetRasterBand(i), dstile.GetRasterBand(i),
'average')
if res != 0:
exit_with_error("RegenerateOverview() failed on %s, error %d" % (
tilefilename, res))
elif options.resampling == 'antialias':
# Scaling by PIL (Python Imaging Library) - improved Lanczos
array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8)
for i in range(tilebands):
array[:, :, i] = gdalarray.BandReadAsArray(dsquery.GetRasterBand(i + 1),
0, 0, querysize, querysize)
im = Image.fromarray(array, 'RGBA') # Always four bands
im1 = im.resize((tilesize, tilesize), Image.ANTIALIAS)
if os.path.exists(tilefilename):
im0 = Image.open(tilefilename)
im1 = Image.composite(im1, im0, im1)
im1.save(tilefilename, tiledriver)
else:
if options.resampling == 'near':
gdal_resampling = gdal.GRA_NearestNeighbour
elif options.resampling == 'bilinear':
gdal_resampling = gdal.GRA_Bilinear
elif options.resampling == 'cubic':
gdal_resampling = gdal.GRA_Cubic
elif options.resampling == 'cubicspline':
gdal_resampling = gdal.GRA_CubicSpline
elif options.resampling == 'lanczos':
gdal_resampling = gdal.GRA_Lanczos
# Other algorithms are implemented by gdal.ReprojectImage().
dsquery.SetGeoTransform((0.0, tilesize / float(querysize), 0.0, 0.0, 0.0,
tilesize / float(querysize)))
dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0))
res = gdal.ReprojectImage(dsquery, dstile, None, None, gdal_resampling)
if res != 0:
exit_with_error("ReprojectImage() failed on %s, error %d" % (tilefilename, res)) | def scale_query_to_tile(dsquery, dstile, tiledriver, options, tilefilename=''):
"""Scales down query dataset to the tile dataset"""
querysize = dsquery.RasterXSize
tilesize = dstile.RasterXSize
tilebands = dstile.RasterCount
if options.resampling == 'average':
# Function: gdal.RegenerateOverview()
for i in range(1, tilebands + 1):
# Black border around NODATA
res = gdal.RegenerateOverview(dsquery.GetRasterBand(i), dstile.GetRasterBand(i),
'average')
if res != 0:
exit_with_error("RegenerateOverview() failed on %s, error %d" % (
tilefilename, res))
elif options.resampling == 'antialias':
# Scaling by PIL (Python Imaging Library) - improved Lanczos
array = numpy.zeros((querysize, querysize, tilebands), numpy.uint8)
for i in range(tilebands):
array[:, :, i] = gdalarray.BandReadAsArray(dsquery.GetRasterBand(i + 1),
0, 0, querysize, querysize)
im = Image.fromarray(array, 'RGBA') # Always four bands
im1 = im.resize((tilesize, tilesize), Image.ANTIALIAS)
if os.path.exists(tilefilename):
im0 = Image.open(tilefilename)
im1 = Image.composite(im1, im0, im1)
im1.save(tilefilename, tiledriver)
else:
if options.resampling == 'near':
gdal_resampling = gdal.GRA_NearestNeighbour
elif options.resampling == 'bilinear':
gdal_resampling = gdal.GRA_Bilinear
elif options.resampling == 'cubic':
gdal_resampling = gdal.GRA_Cubic
elif options.resampling == 'cubicspline':
gdal_resampling = gdal.GRA_CubicSpline
elif options.resampling == 'lanczos':
gdal_resampling = gdal.GRA_Lanczos
# Other algorithms are implemented by gdal.ReprojectImage().
dsquery.SetGeoTransform((0.0, tilesize / float(querysize), 0.0, 0.0, 0.0,
tilesize / float(querysize)))
dstile.SetGeoTransform((0.0, 1.0, 0.0, 0.0, 0.0, 1.0))
res = gdal.ReprojectImage(dsquery, dstile, None, None, gdal_resampling)
if res != 0:
exit_with_error("ReprojectImage() failed on %s, error %d" % (tilefilename, res)) |
Python | def reproject_dataset(from_dataset, from_srs, to_srs, options=None):
"""
Returns the input dataset in the expected "destination" SRS.
If the dataset is already in the correct SRS, returns it unmodified
"""
if not from_srs or not to_srs:
raise GDALError("from and to SRS must be defined to reproject the dataset")
if (from_srs.ExportToProj4() != to_srs.ExportToProj4()) or (from_dataset.GetGCPCount() != 0):
to_dataset = gdal.AutoCreateWarpedVRT(from_dataset,
from_srs.ExportToWkt(), to_srs.ExportToWkt())
if options and options.verbose:
print("Warping of the raster by AutoCreateWarpedVRT (result saved into 'tiles.vrt')")
to_dataset.GetDriver().CreateCopy("tiles.vrt", to_dataset)
return to_dataset
else:
return from_dataset | def reproject_dataset(from_dataset, from_srs, to_srs, options=None):
"""
Returns the input dataset in the expected "destination" SRS.
If the dataset is already in the correct SRS, returns it unmodified
"""
if not from_srs or not to_srs:
raise GDALError("from and to SRS must be defined to reproject the dataset")
if (from_srs.ExportToProj4() != to_srs.ExportToProj4()) or (from_dataset.GetGCPCount() != 0):
to_dataset = gdal.AutoCreateWarpedVRT(from_dataset,
from_srs.ExportToWkt(), to_srs.ExportToWkt())
if options and options.verbose:
print("Warping of the raster by AutoCreateWarpedVRT (result saved into 'tiles.vrt')")
to_dataset.GetDriver().CreateCopy("tiles.vrt", to_dataset)
return to_dataset
else:
return from_dataset |
Python | def update_no_data_values(warped_vrt_dataset, nodata_values, options=None):
"""
Takes an array of NODATA values and forces them on the WarpedVRT file dataset passed
"""
# TODO: gbataille - Seems that I forgot tests there
if nodata_values != []:
temp_file = gettempfilename('-gdal2tiles.vrt')
warped_vrt_dataset.GetDriver().CreateCopy(temp_file, warped_vrt_dataset)
with open(temp_file, 'r') as f:
vrt_string = f.read()
vrt_string = add_gdal_warp_options_to_string(
vrt_string, {"INIT_DEST": "NO_DATA", "UNIFIED_SRC_NODATA": "YES"})
# TODO: gbataille - check the need for this replacement. Seems to work without
# # replace BandMapping tag for NODATA bands....
# for i in range(len(nodata_values)):
# s = s.replace(
# '<BandMapping src="%i" dst="%i"/>' % ((i+1), (i+1)),
# """
# <BandMapping src="%i" dst="%i">
# <SrcNoDataReal>%i</SrcNoDataReal>
# <SrcNoDataImag>0</SrcNoDataImag>
# <DstNoDataReal>%i</DstNoDataReal>
# <DstNoDataImag>0</DstNoDataImag>
# </BandMapping>
# """ % ((i+1), (i+1), nodata_values[i], nodata_values[i]))
# save the corrected VRT
with open(temp_file, 'w') as f:
f.write(vrt_string)
corrected_dataset = gdal.Open(temp_file)
os.unlink(temp_file)
# set NODATA_VALUE metadata
corrected_dataset.SetMetadataItem(
'NODATA_VALUES', ' '.join([str(i) for i in nodata_values]))
if options and options.verbose:
print("Modified warping result saved into 'tiles1.vrt'")
# TODO: gbataille - test replacing that with a gdal write of the dataset (more
# accurately what's used, even if should be the same
with open("tiles1.vrt", "w") as f:
f.write(vrt_string)
return corrected_dataset | def update_no_data_values(warped_vrt_dataset, nodata_values, options=None):
"""
Takes an array of NODATA values and forces them on the WarpedVRT file dataset passed
"""
# TODO: gbataille - Seems that I forgot tests there
if nodata_values != []:
temp_file = gettempfilename('-gdal2tiles.vrt')
warped_vrt_dataset.GetDriver().CreateCopy(temp_file, warped_vrt_dataset)
with open(temp_file, 'r') as f:
vrt_string = f.read()
vrt_string = add_gdal_warp_options_to_string(
vrt_string, {"INIT_DEST": "NO_DATA", "UNIFIED_SRC_NODATA": "YES"})
# TODO: gbataille - check the need for this replacement. Seems to work without
# # replace BandMapping tag for NODATA bands....
# for i in range(len(nodata_values)):
# s = s.replace(
# '<BandMapping src="%i" dst="%i"/>' % ((i+1), (i+1)),
# """
# <BandMapping src="%i" dst="%i">
# <SrcNoDataReal>%i</SrcNoDataReal>
# <SrcNoDataImag>0</SrcNoDataImag>
# <DstNoDataReal>%i</DstNoDataReal>
# <DstNoDataImag>0</DstNoDataImag>
# </BandMapping>
# """ % ((i+1), (i+1), nodata_values[i], nodata_values[i]))
# save the corrected VRT
with open(temp_file, 'w') as f:
f.write(vrt_string)
corrected_dataset = gdal.Open(temp_file)
os.unlink(temp_file)
# set NODATA_VALUE metadata
corrected_dataset.SetMetadataItem(
'NODATA_VALUES', ' '.join([str(i) for i in nodata_values]))
if options and options.verbose:
print("Modified warping result saved into 'tiles1.vrt'")
# TODO: gbataille - test replacing that with a gdal write of the dataset (more
# accurately what's used, even if should be the same
with open("tiles1.vrt", "w") as f:
f.write(vrt_string)
return corrected_dataset |
Python | def update_alpha_value_for_non_alpha_inputs(warped_vrt_dataset, options=None):
"""
Handles dataset with 1 or 3 bands, i.e. without alpha channel, in the case the nodata value has
not been forced by options
"""
if warped_vrt_dataset.RasterCount in [1, 3]:
tempfilename = gettempfilename('-gdal2tiles.vrt')
warped_vrt_dataset.GetDriver().CreateCopy(tempfilename, warped_vrt_dataset)
with open(tempfilename) as f:
orig_data = f.read()
alpha_data = add_alpha_band_to_string_vrt(orig_data)
with open(tempfilename, 'w') as f:
f.write(alpha_data)
warped_vrt_dataset = gdal.Open(tempfilename)
os.unlink(tempfilename)
if options and options.verbose:
print("Modified -dstalpha warping result saved into 'tiles1.vrt'")
# TODO: gbataille - test replacing that with a gdal write of the dataset (more
# accurately what's used, even if should be the same
with open("tiles1.vrt", "w") as f:
f.write(alpha_data)
return warped_vrt_dataset | def update_alpha_value_for_non_alpha_inputs(warped_vrt_dataset, options=None):
"""
Handles dataset with 1 or 3 bands, i.e. without alpha channel, in the case the nodata value has
not been forced by options
"""
if warped_vrt_dataset.RasterCount in [1, 3]:
tempfilename = gettempfilename('-gdal2tiles.vrt')
warped_vrt_dataset.GetDriver().CreateCopy(tempfilename, warped_vrt_dataset)
with open(tempfilename) as f:
orig_data = f.read()
alpha_data = add_alpha_band_to_string_vrt(orig_data)
with open(tempfilename, 'w') as f:
f.write(alpha_data)
warped_vrt_dataset = gdal.Open(tempfilename)
os.unlink(tempfilename)
if options and options.verbose:
print("Modified -dstalpha warping result saved into 'tiles1.vrt'")
# TODO: gbataille - test replacing that with a gdal write of the dataset (more
# accurately what's used, even if should be the same
with open("tiles1.vrt", "w") as f:
f.write(alpha_data)
return warped_vrt_dataset |
Python | def nb_data_bands(dataset):
"""
Return the number of data (non-alpha) bands of a gdal dataset
"""
alphaband = dataset.GetRasterBand(1).GetMaskBand()
if ((alphaband.GetMaskFlags() & gdal.GMF_ALPHA) or
dataset.RasterCount == 4 or
dataset.RasterCount == 2):
return dataset.RasterCount - 1
else:
return dataset.RasterCount | def nb_data_bands(dataset):
"""
Return the number of data (non-alpha) bands of a gdal dataset
"""
alphaband = dataset.GetRasterBand(1).GetMaskBand()
if ((alphaband.GetMaskFlags() & gdal.GMF_ALPHA) or
dataset.RasterCount == 4 or
dataset.RasterCount == 2):
return dataset.RasterCount - 1
else:
return dataset.RasterCount |
Python | def create_overview_tiles(tile_job_info, output_folder, options):
"""Generation of the overview tiles (higher in the pyramid) based on existing tiles"""
mem_driver = gdal.GetDriverByName('MEM')
tile_driver = tile_job_info.tile_driver
out_driver = gdal.GetDriverByName(tile_driver)
tilebands = tile_job_info.nb_data_bands + 1
# Usage of existing tiles: from 4 underlying tiles generate one as overview.
tcount = 0
for tz in range(tile_job_info.tmaxz - 1, tile_job_info.tminz - 1, -1):
tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[tz]
tcount += (1 + abs(tmaxx - tminx)) * (1 + abs(tmaxy - tminy))
ti = 0
if tcount == 0:
return
if not options.quiet:
print("Generating Overview Tiles:")
progress_bar = ProgressBar(tcount)
progress_bar.start()
for tz in range(tile_job_info.tmaxz - 1, tile_job_info.tminz - 1, -1):
tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[tz]
for ty in range(tmaxy, tminy - 1, -1):
for tx in range(tminx, tmaxx + 1):
ti += 1
tilefilename = os.path.join(output_folder,
str(tz),
str(tx),
"%s.%s" % (ty, tile_job_info.tile_extension))
if options.verbose:
print(ti, '/', tcount, tilefilename)
if options.resume and os.path.exists(tilefilename):
if options.verbose:
print("Tile generation skipped because of --resume")
else:
progress_bar.log_progress()
continue
# Create directories for the tile
if not os.path.exists(os.path.dirname(tilefilename)):
os.makedirs(os.path.dirname(tilefilename))
dsquery = mem_driver.Create('', 2 * tile_job_info.tile_size,
2 * tile_job_info.tile_size, tilebands)
# TODO: fill the null value
dstile = mem_driver.Create('', tile_job_info.tile_size, tile_job_info.tile_size,
tilebands)
# TODO: Implement more clever walking on the tiles with cache functionality
# probably walk should start with reading of four tiles from top left corner
# Hilbert curve
children = []
# Read the tiles and write them to query window
for y in range(2 * ty, 2 * ty + 2):
for x in range(2 * tx, 2 * tx + 2):
minx, miny, maxx, maxy = tile_job_info.tminmax[tz + 1]
if x >= minx and x <= maxx and y >= miny and y <= maxy:
dsquerytile = gdal.Open(
os.path.join(output_folder, str(tz + 1), str(x),
"%s.%s" % (y, tile_job_info.tile_extension)),
gdal.GA_ReadOnly)
if (ty == 0 and y == 1) or (ty != 0 and (y % (2 * ty)) != 0):
tileposy = 0
else:
tileposy = tile_job_info.tile_size
if tx:
tileposx = x % (2 * tx) * tile_job_info.tile_size
elif tx == 0 and x == 1:
tileposx = tile_job_info.tile_size
else:
tileposx = 0
dsquery.WriteRaster(
tileposx, tileposy, tile_job_info.tile_size,
tile_job_info.tile_size,
dsquerytile.ReadRaster(0, 0,
tile_job_info.tile_size,
tile_job_info.tile_size),
band_list=list(range(1, tilebands + 1)))
children.append([x, y, tz + 1])
scale_query_to_tile(dsquery, dstile, tile_driver, options,
tilefilename=tilefilename)
# Write a copy of tile to png/jpg
if options.resampling != 'antialias':
# Write a copy of tile to png/jpg
out_driver.CreateCopy(tilefilename, dstile, strict=0)
if options.verbose:
print("\tbuild from zoom", tz + 1,
" tiles:", (2 * tx, 2 * ty), (2 * tx + 1, 2 * ty),
(2 * tx, 2 * ty + 1), (2 * tx + 1, 2 * ty + 1))
# Create a KML file for this tile.
if tile_job_info.kml:
with open(os.path.join(
output_folder,
'%d/%d/%d.kml' % (tz, tx, ty)
), 'wb') as f:
f.write(generate_kml(
tx, ty, tz, tile_job_info.tile_extension, tile_job_info.tile_size,
get_tile_swne(tile_job_info, options), options, children
).encode('utf-8'))
if not options.verbose and not options.quiet:
progress_bar.log_progress() | def create_overview_tiles(tile_job_info, output_folder, options):
"""Generation of the overview tiles (higher in the pyramid) based on existing tiles"""
mem_driver = gdal.GetDriverByName('MEM')
tile_driver = tile_job_info.tile_driver
out_driver = gdal.GetDriverByName(tile_driver)
tilebands = tile_job_info.nb_data_bands + 1
# Usage of existing tiles: from 4 underlying tiles generate one as overview.
tcount = 0
for tz in range(tile_job_info.tmaxz - 1, tile_job_info.tminz - 1, -1):
tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[tz]
tcount += (1 + abs(tmaxx - tminx)) * (1 + abs(tmaxy - tminy))
ti = 0
if tcount == 0:
return
if not options.quiet:
print("Generating Overview Tiles:")
progress_bar = ProgressBar(tcount)
progress_bar.start()
for tz in range(tile_job_info.tmaxz - 1, tile_job_info.tminz - 1, -1):
tminx, tminy, tmaxx, tmaxy = tile_job_info.tminmax[tz]
for ty in range(tmaxy, tminy - 1, -1):
for tx in range(tminx, tmaxx + 1):
ti += 1
tilefilename = os.path.join(output_folder,
str(tz),
str(tx),
"%s.%s" % (ty, tile_job_info.tile_extension))
if options.verbose:
print(ti, '/', tcount, tilefilename)
if options.resume and os.path.exists(tilefilename):
if options.verbose:
print("Tile generation skipped because of --resume")
else:
progress_bar.log_progress()
continue
# Create directories for the tile
if not os.path.exists(os.path.dirname(tilefilename)):
os.makedirs(os.path.dirname(tilefilename))
dsquery = mem_driver.Create('', 2 * tile_job_info.tile_size,
2 * tile_job_info.tile_size, tilebands)
# TODO: fill the null value
dstile = mem_driver.Create('', tile_job_info.tile_size, tile_job_info.tile_size,
tilebands)
# TODO: Implement more clever walking on the tiles with cache functionality
# probably walk should start with reading of four tiles from top left corner
# Hilbert curve
children = []
# Read the tiles and write them to query window
for y in range(2 * ty, 2 * ty + 2):
for x in range(2 * tx, 2 * tx + 2):
minx, miny, maxx, maxy = tile_job_info.tminmax[tz + 1]
if x >= minx and x <= maxx and y >= miny and y <= maxy:
dsquerytile = gdal.Open(
os.path.join(output_folder, str(tz + 1), str(x),
"%s.%s" % (y, tile_job_info.tile_extension)),
gdal.GA_ReadOnly)
if (ty == 0 and y == 1) or (ty != 0 and (y % (2 * ty)) != 0):
tileposy = 0
else:
tileposy = tile_job_info.tile_size
if tx:
tileposx = x % (2 * tx) * tile_job_info.tile_size
elif tx == 0 and x == 1:
tileposx = tile_job_info.tile_size
else:
tileposx = 0
dsquery.WriteRaster(
tileposx, tileposy, tile_job_info.tile_size,
tile_job_info.tile_size,
dsquerytile.ReadRaster(0, 0,
tile_job_info.tile_size,
tile_job_info.tile_size),
band_list=list(range(1, tilebands + 1)))
children.append([x, y, tz + 1])
scale_query_to_tile(dsquery, dstile, tile_driver, options,
tilefilename=tilefilename)
# Write a copy of tile to png/jpg
if options.resampling != 'antialias':
# Write a copy of tile to png/jpg
out_driver.CreateCopy(tilefilename, dstile, strict=0)
if options.verbose:
print("\tbuild from zoom", tz + 1,
" tiles:", (2 * tx, 2 * ty), (2 * tx + 1, 2 * ty),
(2 * tx, 2 * ty + 1), (2 * tx + 1, 2 * ty + 1))
# Create a KML file for this tile.
if tile_job_info.kml:
with open(os.path.join(
output_folder,
'%d/%d/%d.kml' % (tz, tx, ty)
), 'wb') as f:
f.write(generate_kml(
tx, ty, tz, tile_job_info.tile_extension, tile_job_info.tile_size,
get_tile_swne(tile_job_info, options), options, children
).encode('utf-8'))
if not options.verbose and not options.quiet:
progress_bar.log_progress() |
Python | def generate_metadata(self):
"""
Generation of main metadata files and HTML viewers (metadata related to particular
tiles are generated during the tile processing).
"""
if not os.path.exists(self.output_folder):
os.makedirs(self.output_folder)
if self.options.profile == 'mercator':
south, west = self.mercator.MetersToLatLon(self.ominx, self.ominy)
north, east = self.mercator.MetersToLatLon(self.omaxx, self.omaxy)
south, west = max(-85.05112878, south), max(-180.0, west)
north, east = min(85.05112878, north), min(180.0, east)
self.swne = (south, west, north, east)
# Generate googlemaps.html
if self.options.webviewer in ('all', 'google') and self.options.profile == 'mercator':
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'googlemaps.html'))):
with open(os.path.join(self.output_folder, 'googlemaps.html'), 'wb') as f:
f.write(self.generate_googlemaps().encode('utf-8'))
# Generate openlayers.html
if self.options.webviewer in ('all', 'openlayers'):
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))):
with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f:
f.write(self.generate_openlayers().encode('utf-8'))
# Generate leaflet.html
if self.options.webviewer in ('all', 'leaflet'):
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'leaflet.html'))):
with open(os.path.join(self.output_folder, 'leaflet.html'), 'wb') as f:
f.write(self.generate_leaflet().encode('utf-8'))
elif self.options.profile == 'geodetic':
west, south = self.ominx, self.ominy
east, north = self.omaxx, self.omaxy
south, west = max(-90.0, south), max(-180.0, west)
north, east = min(90.0, north), min(180.0, east)
self.swne = (south, west, north, east)
# Generate openlayers.html
if self.options.webviewer in ('all', 'openlayers'):
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))):
with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f:
f.write(self.generate_openlayers().encode('utf-8'))
elif self.options.profile == 'raster':
west, south = self.ominx, self.ominy
east, north = self.omaxx, self.omaxy
self.swne = (south, west, north, east)
# Generate openlayers.html
if self.options.webviewer in ('all', 'openlayers'):
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))):
with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f:
f.write(self.generate_openlayers().encode('utf-8'))
# Generate tilemapresource.xml.
if not self.options.resume or not os.path.exists(os.path.join(self.output_folder, 'tilemapresource.xml')):
with open(os.path.join(self.output_folder, 'tilemapresource.xml'), 'wb') as f:
f.write(self.generate_tilemapresource().encode('utf-8'))
if self.kml:
# TODO: Maybe problem for not automatically generated tminz
# The root KML should contain links to all tiles in the tminz level
children = []
xmin, ymin, xmax, ymax = self.tminmax[self.tminz]
for x in range(xmin, xmax + 1):
for y in range(ymin, ymax + 1):
children.append([x, y, self.tminz])
# Generate Root KML
if self.kml:
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'doc.kml'))):
with open(os.path.join(self.output_folder, 'doc.kml'), 'wb') as f:
f.write(generate_kml(
None, None, None, self.tileext, self.tilesize, self.tileswne,
self.options, children
).encode('utf-8')) | def generate_metadata(self):
"""
Generation of main metadata files and HTML viewers (metadata related to particular
tiles are generated during the tile processing).
"""
if not os.path.exists(self.output_folder):
os.makedirs(self.output_folder)
if self.options.profile == 'mercator':
south, west = self.mercator.MetersToLatLon(self.ominx, self.ominy)
north, east = self.mercator.MetersToLatLon(self.omaxx, self.omaxy)
south, west = max(-85.05112878, south), max(-180.0, west)
north, east = min(85.05112878, north), min(180.0, east)
self.swne = (south, west, north, east)
# Generate googlemaps.html
if self.options.webviewer in ('all', 'google') and self.options.profile == 'mercator':
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'googlemaps.html'))):
with open(os.path.join(self.output_folder, 'googlemaps.html'), 'wb') as f:
f.write(self.generate_googlemaps().encode('utf-8'))
# Generate openlayers.html
if self.options.webviewer in ('all', 'openlayers'):
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))):
with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f:
f.write(self.generate_openlayers().encode('utf-8'))
# Generate leaflet.html
if self.options.webviewer in ('all', 'leaflet'):
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'leaflet.html'))):
with open(os.path.join(self.output_folder, 'leaflet.html'), 'wb') as f:
f.write(self.generate_leaflet().encode('utf-8'))
elif self.options.profile == 'geodetic':
west, south = self.ominx, self.ominy
east, north = self.omaxx, self.omaxy
south, west = max(-90.0, south), max(-180.0, west)
north, east = min(90.0, north), min(180.0, east)
self.swne = (south, west, north, east)
# Generate openlayers.html
if self.options.webviewer in ('all', 'openlayers'):
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))):
with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f:
f.write(self.generate_openlayers().encode('utf-8'))
elif self.options.profile == 'raster':
west, south = self.ominx, self.ominy
east, north = self.omaxx, self.omaxy
self.swne = (south, west, north, east)
# Generate openlayers.html
if self.options.webviewer in ('all', 'openlayers'):
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'openlayers.html'))):
with open(os.path.join(self.output_folder, 'openlayers.html'), 'wb') as f:
f.write(self.generate_openlayers().encode('utf-8'))
# Generate tilemapresource.xml.
if not self.options.resume or not os.path.exists(os.path.join(self.output_folder, 'tilemapresource.xml')):
with open(os.path.join(self.output_folder, 'tilemapresource.xml'), 'wb') as f:
f.write(self.generate_tilemapresource().encode('utf-8'))
if self.kml:
# TODO: Maybe problem for not automatically generated tminz
# The root KML should contain links to all tiles in the tminz level
children = []
xmin, ymin, xmax, ymax = self.tminmax[self.tminz]
for x in range(xmin, xmax + 1):
for y in range(ymin, ymax + 1):
children.append([x, y, self.tminz])
# Generate Root KML
if self.kml:
if (not self.options.resume or not
os.path.exists(os.path.join(self.output_folder, 'doc.kml'))):
with open(os.path.join(self.output_folder, 'doc.kml'), 'wb') as f:
f.write(generate_kml(
None, None, None, self.tileext, self.tilesize, self.tileswne,
self.options, children
).encode('utf-8')) |
Python | def generate_base_tiles(self):
"""
Generation of the base tiles (the lowest in the pyramid) directly from the input raster
"""
if not self.options.quiet:
print("Generating Base Tiles:")
if self.options.verbose:
print('')
print("Tiles generated from the max zoom level:")
print("----------------------------------------")
print('')
# Set the bounds
tminx, tminy, tmaxx, tmaxy = self.tminmax[self.tmaxz]
ds = self.warped_input_dataset
tilebands = self.dataBandsCount + 1
querysize = self.querysize
if self.options.verbose:
print("dataBandsCount: ", self.dataBandsCount)
print("tilebands: ", tilebands)
tcount = (1 + abs(tmaxx - tminx)) * (1 + abs(tmaxy - tminy))
ti = 0
tile_details = []
tz = self.tmaxz
for ty in range(tmaxy, tminy - 1, -1):
for tx in range(tminx, tmaxx + 1):
ti += 1
tilefilename = os.path.join(
self.output_folder, str(tz), str(tx), "%s.%s" % (ty, self.tileext))
if self.options.verbose:
print(ti, '/', tcount, tilefilename)
if self.options.resume and os.path.exists(tilefilename):
if self.options.verbose:
print("Tile generation skipped because of --resume")
continue
# Create directories for the tile
if not os.path.exists(os.path.dirname(tilefilename)):
os.makedirs(os.path.dirname(tilefilename))
if self.options.profile == 'mercator':
# Tile bounds in EPSG:3857
b = self.mercator.TileBounds(tx, ty, tz)
elif self.options.profile == 'geodetic':
b = self.geodetic.TileBounds(tx, ty, tz)
# Don't scale up by nearest neighbour, better change the querysize
# to the native resolution (and return smaller query tile) for scaling
if self.options.profile in ('mercator', 'geodetic'):
rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1])
# Pixel size in the raster covering query geo extent
nativesize = wb[0] + wb[2]
if self.options.verbose:
print("\tNative Extent (querysize", nativesize, "): ", rb, wb)
# Tile bounds in raster coordinates for ReadRaster query
rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1], querysize=querysize)
rx, ry, rxsize, rysize = rb
wx, wy, wxsize, wysize = wb
else: # 'raster' profile:
tsize = int(self.tsize[tz]) # tilesize in raster coordinates for actual zoom
xsize = self.warped_input_dataset.RasterXSize # size of the raster in pixels
ysize = self.warped_input_dataset.RasterYSize
if tz >= self.nativezoom:
querysize = self.tilesize
rx = (tx) * tsize
rxsize = 0
if tx == tmaxx:
rxsize = xsize % tsize
if rxsize == 0:
rxsize = tsize
rysize = 0
if ty == tmaxy:
rysize = ysize % tsize
if rysize == 0:
rysize = tsize
ry = ysize - (ty * tsize) - rysize
wx, wy = 0, 0
wxsize = int(rxsize / float(tsize) * self.tilesize)
wysize = int(rysize / float(tsize) * self.tilesize)
if wysize != self.tilesize:
wy = self.tilesize - wysize
# Read the source raster if anything is going inside the tile as per the computed
# geo_query
tile_details.append(
TileDetail(
tx=tx, ty=ty, tz=tz, rx=rx, ry=ry, rxsize=rxsize, rysize=rysize, wx=wx,
wy=wy, wxsize=wxsize, wysize=wysize, querysize=querysize,
)
)
conf = TileJobInfo(
src_file=self.tmp_vrt_filename,
nb_data_bands=self.dataBandsCount,
output_file_path=self.output_folder,
tile_extension=self.tileext,
tile_driver=self.tiledriver,
tile_size=self.tilesize,
kml=self.kml,
tminmax=self.tminmax,
tminz=self.tminz,
tmaxz=self.tmaxz,
in_srs_wkt=self.in_srs_wkt,
out_geo_trans=self.out_gt,
ominy=self.ominy,
is_epsg_4326=self.isepsg4326,
options=self.options,
)
return conf, tile_details | def generate_base_tiles(self):
"""
Generation of the base tiles (the lowest in the pyramid) directly from the input raster
"""
if not self.options.quiet:
print("Generating Base Tiles:")
if self.options.verbose:
print('')
print("Tiles generated from the max zoom level:")
print("----------------------------------------")
print('')
# Set the bounds
tminx, tminy, tmaxx, tmaxy = self.tminmax[self.tmaxz]
ds = self.warped_input_dataset
tilebands = self.dataBandsCount + 1
querysize = self.querysize
if self.options.verbose:
print("dataBandsCount: ", self.dataBandsCount)
print("tilebands: ", tilebands)
tcount = (1 + abs(tmaxx - tminx)) * (1 + abs(tmaxy - tminy))
ti = 0
tile_details = []
tz = self.tmaxz
for ty in range(tmaxy, tminy - 1, -1):
for tx in range(tminx, tmaxx + 1):
ti += 1
tilefilename = os.path.join(
self.output_folder, str(tz), str(tx), "%s.%s" % (ty, self.tileext))
if self.options.verbose:
print(ti, '/', tcount, tilefilename)
if self.options.resume and os.path.exists(tilefilename):
if self.options.verbose:
print("Tile generation skipped because of --resume")
continue
# Create directories for the tile
if not os.path.exists(os.path.dirname(tilefilename)):
os.makedirs(os.path.dirname(tilefilename))
if self.options.profile == 'mercator':
# Tile bounds in EPSG:3857
b = self.mercator.TileBounds(tx, ty, tz)
elif self.options.profile == 'geodetic':
b = self.geodetic.TileBounds(tx, ty, tz)
# Don't scale up by nearest neighbour, better change the querysize
# to the native resolution (and return smaller query tile) for scaling
if self.options.profile in ('mercator', 'geodetic'):
rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1])
# Pixel size in the raster covering query geo extent
nativesize = wb[0] + wb[2]
if self.options.verbose:
print("\tNative Extent (querysize", nativesize, "): ", rb, wb)
# Tile bounds in raster coordinates for ReadRaster query
rb, wb = self.geo_query(ds, b[0], b[3], b[2], b[1], querysize=querysize)
rx, ry, rxsize, rysize = rb
wx, wy, wxsize, wysize = wb
else: # 'raster' profile:
tsize = int(self.tsize[tz]) # tilesize in raster coordinates for actual zoom
xsize = self.warped_input_dataset.RasterXSize # size of the raster in pixels
ysize = self.warped_input_dataset.RasterYSize
if tz >= self.nativezoom:
querysize = self.tilesize
rx = (tx) * tsize
rxsize = 0
if tx == tmaxx:
rxsize = xsize % tsize
if rxsize == 0:
rxsize = tsize
rysize = 0
if ty == tmaxy:
rysize = ysize % tsize
if rysize == 0:
rysize = tsize
ry = ysize - (ty * tsize) - rysize
wx, wy = 0, 0
wxsize = int(rxsize / float(tsize) * self.tilesize)
wysize = int(rysize / float(tsize) * self.tilesize)
if wysize != self.tilesize:
wy = self.tilesize - wysize
# Read the source raster if anything is going inside the tile as per the computed
# geo_query
tile_details.append(
TileDetail(
tx=tx, ty=ty, tz=tz, rx=rx, ry=ry, rxsize=rxsize, rysize=rysize, wx=wx,
wy=wy, wxsize=wxsize, wysize=wysize, querysize=querysize,
)
)
conf = TileJobInfo(
src_file=self.tmp_vrt_filename,
nb_data_bands=self.dataBandsCount,
output_file_path=self.output_folder,
tile_extension=self.tileext,
tile_driver=self.tiledriver,
tile_size=self.tilesize,
kml=self.kml,
tminmax=self.tminmax,
tminz=self.tminz,
tmaxz=self.tmaxz,
in_srs_wkt=self.in_srs_wkt,
out_geo_trans=self.out_gt,
ominy=self.ominy,
is_epsg_4326=self.isepsg4326,
options=self.options,
)
return conf, tile_details |
Python | def generate_googlemaps(self):
"""
Template for googlemaps.html implementing Overlay of tiles for 'mercator' profile.
It returns filled string. Expected variables:
title, googlemapskey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat,
publishurl
"""
args = {}
args['title'] = self.options.title
args['googlemapskey'] = self.options.googlekey
args['south'], args['west'], args['north'], args['east'] = self.swne
args['minzoom'] = self.tminz
args['maxzoom'] = self.tmaxz
args['tilesize'] = self.tilesize
args['tileformat'] = self.tileext
args['publishurl'] = self.options.url
args['copyright'] = self.options.copyright
s = r"""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml">
<head>
<title>%(title)s</title>
<meta http-equiv="content-type" content="text/html; charset=utf-8"/>
<meta http-equiv='imagetoolbar' content='no'/>
<style type="text/css"> v\:* {behavior:url(#default#VML);}
html, body { overflow: hidden; padding: 0; height: 100%%; width: 100%%; font-family: 'Lucida Grande',Geneva,Arial,Verdana,sans-serif; }
body { margin: 10px; background: #fff; }
h1 { margin: 0; padding: 6px; border:0; font-size: 20pt; }
#header { height: 43px; padding: 0; background-color: #eee; border: 1px solid #888; }
#subheader { height: 12px; text-align: right; font-size: 10px; color: #555;}
#map { height: 95%%; border: 1px solid #888; }
</style>
<script src='http://maps.google.com/maps?file=api&v=2&key=%(googlemapskey)s'></script>
<script>
//<![CDATA[
/*
* Constants for given map
* TODO: read it from tilemapresource.xml
*/
var mapBounds = new GLatLngBounds(new GLatLng(%(south)s, %(west)s), new GLatLng(%(north)s, %(east)s));
var mapMinZoom = %(minzoom)s;
var mapMaxZoom = %(maxzoom)s;
var opacity = 0.75;
var map;
var hybridOverlay;
/*
* Create a Custom Opacity GControl
* http://www.maptiler.org/google-maps-overlay-opacity-control/
*/
var CTransparencyLENGTH = 58;
// maximum width that the knob can move (slide width minus knob width)
function CTransparencyControl( overlay ) {
this.overlay = overlay;
this.opacity = overlay.getTileLayer().getOpacity();
}
CTransparencyControl.prototype = new GControl();
// This function positions the slider to match the specified opacity
CTransparencyControl.prototype.setSlider = function(pos) {
var left = Math.round((CTransparencyLENGTH*pos));
this.slide.left = left;
this.knob.style.left = left+"px";
this.knob.style.top = "0px";
}
// This function reads the slider and sets the overlay opacity level
CTransparencyControl.prototype.setOpacity = function() {
// set the global variable
opacity = this.slide.left/CTransparencyLENGTH;
this.map.clearOverlays();
this.map.addOverlay(this.overlay, { zPriority: 0 });
if (this.map.getCurrentMapType() == G_HYBRID_MAP) {
this.map.addOverlay(hybridOverlay);
}
}
// This gets called by the API when addControl(new CTransparencyControl())
CTransparencyControl.prototype.initialize = function(map) {
var that=this;
this.map = map;
// Is this MSIE, if so we need to use AlphaImageLoader
var agent = navigator.userAgent.toLowerCase();
if ((agent.indexOf("msie") > -1) && (agent.indexOf("opera") < 1)){this.ie = true} else {this.ie = false}
// create the background graphic as a <div> containing an image
var container = document.createElement("div");
container.style.width="70px";
container.style.height="21px";
// Handle transparent PNG files in MSIE
if (this.ie) {
var loader = "filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='http://www.maptiler.org/img/opacity-slider.png', sizingMethod='crop');";
container.innerHTML = '<div style="height:21px; width:70px; ' +loader+ '" ></div>';
} else {
container.innerHTML = '<div style="height:21px; width:70px; background-image: url(http://www.maptiler.org/img/opacity-slider.png)" ></div>';
}
// create the knob as a GDraggableObject
// Handle transparent PNG files in MSIE
if (this.ie) {
var loader = "progid:DXImageTransform.Microsoft.AlphaImageLoader(src='http://www.maptiler.org/img/opacity-slider.png', sizingMethod='crop');";
this.knob = document.createElement("div");
this.knob.style.height="21px";
this.knob.style.width="13px";
this.knob.style.overflow="hidden";
this.knob_img = document.createElement("div");
this.knob_img.style.height="21px";
this.knob_img.style.width="83px";
this.knob_img.style.filter=loader;
this.knob_img.style.position="relative";
this.knob_img.style.left="-70px";
this.knob.appendChild(this.knob_img);
} else {
this.knob = document.createElement("div");
this.knob.style.height="21px";
this.knob.style.width="13px";
this.knob.style.backgroundImage="url(http://www.maptiler.org/img/opacity-slider.png)";
this.knob.style.backgroundPosition="-70px 0px";
}
container.appendChild(this.knob);
this.slide=new GDraggableObject(this.knob, {container:container});
this.slide.setDraggableCursor('pointer');
this.slide.setDraggingCursor('pointer');
this.container = container;
// attach the control to the map
map.getContainer().appendChild(container);
// init slider
this.setSlider(this.opacity);
// Listen for the slider being moved and set the opacity
GEvent.addListener(this.slide, "dragend", function() {that.setOpacity()});
//GEvent.addListener(this.container, "click", function( x, y ) { alert(x, y) });
return container;
}
// Set the default position for the control
CTransparencyControl.prototype.getDefaultPosition = function() {
return new GControlPosition(G_ANCHOR_TOP_RIGHT, new GSize(7, 47));
}
/*
* Full-screen Window Resize
*/
function getWindowHeight() {
if (self.innerHeight) return self.innerHeight;
if (document.documentElement && document.documentElement.clientHeight)
return document.documentElement.clientHeight;
if (document.body) return document.body.clientHeight;
return 0;
}
function getWindowWidth() {
if (self.innerWidth) return self.innerWidth;
if (document.documentElement && document.documentElement.clientWidth)
return document.documentElement.clientWidth;
if (document.body) return document.body.clientWidth;
return 0;
}
function resize() {
var map = document.getElementById("map");
var header = document.getElementById("header");
var subheader = document.getElementById("subheader");
map.style.height = (getWindowHeight()-80) + "px";
map.style.width = (getWindowWidth()-20) + "px";
header.style.width = (getWindowWidth()-20) + "px";
subheader.style.width = (getWindowWidth()-20) + "px";
// map.checkResize();
}
/*
* Main load function:
*/
function load() {
if (GBrowserIsCompatible()) {
// Bug in the Google Maps: Copyright for Overlay is not correctly displayed
var gcr = GMapType.prototype.getCopyrights;
GMapType.prototype.getCopyrights = function(bounds,zoom) {
return ["%(copyright)s"].concat(gcr.call(this,bounds,zoom));
}
map = new GMap2( document.getElementById("map"), { backgroundColor: '#fff' } );
map.addMapType(G_PHYSICAL_MAP);
map.setMapType(G_PHYSICAL_MAP);
map.setCenter( mapBounds.getCenter(), map.getBoundsZoomLevel( mapBounds ));
hybridOverlay = new GTileLayerOverlay( G_HYBRID_MAP.getTileLayers()[1] );
GEvent.addListener(map, "maptypechanged", function() {
if (map.getCurrentMapType() == G_HYBRID_MAP) {
map.addOverlay(hybridOverlay);
} else {
map.removeOverlay(hybridOverlay);
}
} );
var tilelayer = new GTileLayer(GCopyrightCollection(''), mapMinZoom, mapMaxZoom);
var mercator = new GMercatorProjection(mapMaxZoom+1);
tilelayer.getTileUrl = function(tile,zoom) {
if ((zoom < mapMinZoom) || (zoom > mapMaxZoom)) {
return "http://www.maptiler.org/img/none.png";
}
var ymax = 1 << zoom;
var y = ymax - tile.y -1;
var tileBounds = new GLatLngBounds(
mercator.fromPixelToLatLng( new GPoint( (tile.x)*256, (tile.y+1)*256 ) , zoom ),
mercator.fromPixelToLatLng( new GPoint( (tile.x+1)*256, (tile.y)*256 ) , zoom )
);
if (mapBounds.intersects(tileBounds)) {
return zoom+"/"+tile.x+"/"+y+".png";
} else {
return "http://www.maptiler.org/img/none.png";
}
}
// IE 7-: support for PNG alpha channel
// Unfortunately, the opacity for whole overlay is then not changeable, either or...
tilelayer.isPng = function() { return true;};
tilelayer.getOpacity = function() { return opacity; }
overlay = new GTileLayerOverlay( tilelayer );
map.addOverlay(overlay);
map.addControl(new GLargeMapControl());
map.addControl(new GHierarchicalMapTypeControl());
map.addControl(new CTransparencyControl( overlay ));
""" % args # noqa
if self.kml:
s += """
map.addMapType(G_SATELLITE_3D_MAP);
map.getEarthInstance(getEarthInstanceCB);
"""
s += """
map.enableContinuousZoom();
map.enableScrollWheelZoom();
map.setMapType(G_HYBRID_MAP);
}
resize();
}
"""
if self.kml:
s += """
function getEarthInstanceCB(object) {
var ge = object;
if (ge) {
var url = document.location.toString();
url = url.substr(0,url.lastIndexOf('/'))+'/doc.kml';
var link = ge.createLink("");
if ("%(publishurl)s") { link.setHref("%(publishurl)s/doc.kml") }
else { link.setHref(url) };
var networkLink = ge.createNetworkLink("");
networkLink.setName("TMS Map Overlay");
networkLink.setFlyToView(true);
networkLink.setLink(link);
ge.getFeatures().appendChild(networkLink);
} else {
// alert("You should open a KML in Google Earth");
// add div with the link to generated KML... - maybe JavaScript redirect to the URL of KML?
}
}
""" % args # noqa
s += """
onresize=function(){ resize(); };
//]]>
</script>
</head>
<body onload="load()">
<div id="header"><h1>%(title)s</h1></div>
<div id="subheader">Generated by <a href="http://www.klokan.cz/projects/gdal2tiles/">GDAL2Tiles</a>, Copyright © 2008 <a href="http://www.klokan.cz/">Klokan Petr Pridal</a>, <a href="http://www.gdal.org/">GDAL</a> & <a href="http://www.osgeo.org/">OSGeo</a> <a href="http://code.google.com/soc/">GSoC</a>
<!-- PLEASE, LET THIS NOTE ABOUT AUTHOR AND PROJECT SOMEWHERE ON YOUR WEBSITE, OR AT LEAST IN THE COMMENT IN HTML. THANK YOU -->
</div>
<div id="map"></div>
</body>
</html>
""" % args # noqa
return s | def generate_googlemaps(self):
"""
Template for googlemaps.html implementing Overlay of tiles for 'mercator' profile.
It returns filled string. Expected variables:
title, googlemapskey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat,
publishurl
"""
args = {}
args['title'] = self.options.title
args['googlemapskey'] = self.options.googlekey
args['south'], args['west'], args['north'], args['east'] = self.swne
args['minzoom'] = self.tminz
args['maxzoom'] = self.tmaxz
args['tilesize'] = self.tilesize
args['tileformat'] = self.tileext
args['publishurl'] = self.options.url
args['copyright'] = self.options.copyright
s = r"""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml">
<head>
<title>%(title)s</title>
<meta http-equiv="content-type" content="text/html; charset=utf-8"/>
<meta http-equiv='imagetoolbar' content='no'/>
<style type="text/css"> v\:* {behavior:url(#default#VML);}
html, body { overflow: hidden; padding: 0; height: 100%%; width: 100%%; font-family: 'Lucida Grande',Geneva,Arial,Verdana,sans-serif; }
body { margin: 10px; background: #fff; }
h1 { margin: 0; padding: 6px; border:0; font-size: 20pt; }
#header { height: 43px; padding: 0; background-color: #eee; border: 1px solid #888; }
#subheader { height: 12px; text-align: right; font-size: 10px; color: #555;}
#map { height: 95%%; border: 1px solid #888; }
</style>
<script src='http://maps.google.com/maps?file=api&v=2&key=%(googlemapskey)s'></script>
<script>
//<![CDATA[
/*
* Constants for given map
* TODO: read it from tilemapresource.xml
*/
var mapBounds = new GLatLngBounds(new GLatLng(%(south)s, %(west)s), new GLatLng(%(north)s, %(east)s));
var mapMinZoom = %(minzoom)s;
var mapMaxZoom = %(maxzoom)s;
var opacity = 0.75;
var map;
var hybridOverlay;
/*
* Create a Custom Opacity GControl
* http://www.maptiler.org/google-maps-overlay-opacity-control/
*/
var CTransparencyLENGTH = 58;
// maximum width that the knob can move (slide width minus knob width)
function CTransparencyControl( overlay ) {
this.overlay = overlay;
this.opacity = overlay.getTileLayer().getOpacity();
}
CTransparencyControl.prototype = new GControl();
// This function positions the slider to match the specified opacity
CTransparencyControl.prototype.setSlider = function(pos) {
var left = Math.round((CTransparencyLENGTH*pos));
this.slide.left = left;
this.knob.style.left = left+"px";
this.knob.style.top = "0px";
}
// This function reads the slider and sets the overlay opacity level
CTransparencyControl.prototype.setOpacity = function() {
// set the global variable
opacity = this.slide.left/CTransparencyLENGTH;
this.map.clearOverlays();
this.map.addOverlay(this.overlay, { zPriority: 0 });
if (this.map.getCurrentMapType() == G_HYBRID_MAP) {
this.map.addOverlay(hybridOverlay);
}
}
// This gets called by the API when addControl(new CTransparencyControl())
CTransparencyControl.prototype.initialize = function(map) {
var that=this;
this.map = map;
// Is this MSIE, if so we need to use AlphaImageLoader
var agent = navigator.userAgent.toLowerCase();
if ((agent.indexOf("msie") > -1) && (agent.indexOf("opera") < 1)){this.ie = true} else {this.ie = false}
// create the background graphic as a <div> containing an image
var container = document.createElement("div");
container.style.width="70px";
container.style.height="21px";
// Handle transparent PNG files in MSIE
if (this.ie) {
var loader = "filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(src='http://www.maptiler.org/img/opacity-slider.png', sizingMethod='crop');";
container.innerHTML = '<div style="height:21px; width:70px; ' +loader+ '" ></div>';
} else {
container.innerHTML = '<div style="height:21px; width:70px; background-image: url(http://www.maptiler.org/img/opacity-slider.png)" ></div>';
}
// create the knob as a GDraggableObject
// Handle transparent PNG files in MSIE
if (this.ie) {
var loader = "progid:DXImageTransform.Microsoft.AlphaImageLoader(src='http://www.maptiler.org/img/opacity-slider.png', sizingMethod='crop');";
this.knob = document.createElement("div");
this.knob.style.height="21px";
this.knob.style.width="13px";
this.knob.style.overflow="hidden";
this.knob_img = document.createElement("div");
this.knob_img.style.height="21px";
this.knob_img.style.width="83px";
this.knob_img.style.filter=loader;
this.knob_img.style.position="relative";
this.knob_img.style.left="-70px";
this.knob.appendChild(this.knob_img);
} else {
this.knob = document.createElement("div");
this.knob.style.height="21px";
this.knob.style.width="13px";
this.knob.style.backgroundImage="url(http://www.maptiler.org/img/opacity-slider.png)";
this.knob.style.backgroundPosition="-70px 0px";
}
container.appendChild(this.knob);
this.slide=new GDraggableObject(this.knob, {container:container});
this.slide.setDraggableCursor('pointer');
this.slide.setDraggingCursor('pointer');
this.container = container;
// attach the control to the map
map.getContainer().appendChild(container);
// init slider
this.setSlider(this.opacity);
// Listen for the slider being moved and set the opacity
GEvent.addListener(this.slide, "dragend", function() {that.setOpacity()});
//GEvent.addListener(this.container, "click", function( x, y ) { alert(x, y) });
return container;
}
// Set the default position for the control
CTransparencyControl.prototype.getDefaultPosition = function() {
return new GControlPosition(G_ANCHOR_TOP_RIGHT, new GSize(7, 47));
}
/*
* Full-screen Window Resize
*/
function getWindowHeight() {
if (self.innerHeight) return self.innerHeight;
if (document.documentElement && document.documentElement.clientHeight)
return document.documentElement.clientHeight;
if (document.body) return document.body.clientHeight;
return 0;
}
function getWindowWidth() {
if (self.innerWidth) return self.innerWidth;
if (document.documentElement && document.documentElement.clientWidth)
return document.documentElement.clientWidth;
if (document.body) return document.body.clientWidth;
return 0;
}
function resize() {
var map = document.getElementById("map");
var header = document.getElementById("header");
var subheader = document.getElementById("subheader");
map.style.height = (getWindowHeight()-80) + "px";
map.style.width = (getWindowWidth()-20) + "px";
header.style.width = (getWindowWidth()-20) + "px";
subheader.style.width = (getWindowWidth()-20) + "px";
// map.checkResize();
}
/*
* Main load function:
*/
function load() {
if (GBrowserIsCompatible()) {
// Bug in the Google Maps: Copyright for Overlay is not correctly displayed
var gcr = GMapType.prototype.getCopyrights;
GMapType.prototype.getCopyrights = function(bounds,zoom) {
return ["%(copyright)s"].concat(gcr.call(this,bounds,zoom));
}
map = new GMap2( document.getElementById("map"), { backgroundColor: '#fff' } );
map.addMapType(G_PHYSICAL_MAP);
map.setMapType(G_PHYSICAL_MAP);
map.setCenter( mapBounds.getCenter(), map.getBoundsZoomLevel( mapBounds ));
hybridOverlay = new GTileLayerOverlay( G_HYBRID_MAP.getTileLayers()[1] );
GEvent.addListener(map, "maptypechanged", function() {
if (map.getCurrentMapType() == G_HYBRID_MAP) {
map.addOverlay(hybridOverlay);
} else {
map.removeOverlay(hybridOverlay);
}
} );
var tilelayer = new GTileLayer(GCopyrightCollection(''), mapMinZoom, mapMaxZoom);
var mercator = new GMercatorProjection(mapMaxZoom+1);
tilelayer.getTileUrl = function(tile,zoom) {
if ((zoom < mapMinZoom) || (zoom > mapMaxZoom)) {
return "http://www.maptiler.org/img/none.png";
}
var ymax = 1 << zoom;
var y = ymax - tile.y -1;
var tileBounds = new GLatLngBounds(
mercator.fromPixelToLatLng( new GPoint( (tile.x)*256, (tile.y+1)*256 ) , zoom ),
mercator.fromPixelToLatLng( new GPoint( (tile.x+1)*256, (tile.y)*256 ) , zoom )
);
if (mapBounds.intersects(tileBounds)) {
return zoom+"/"+tile.x+"/"+y+".png";
} else {
return "http://www.maptiler.org/img/none.png";
}
}
// IE 7-: support for PNG alpha channel
// Unfortunately, the opacity for whole overlay is then not changeable, either or...
tilelayer.isPng = function() { return true;};
tilelayer.getOpacity = function() { return opacity; }
overlay = new GTileLayerOverlay( tilelayer );
map.addOverlay(overlay);
map.addControl(new GLargeMapControl());
map.addControl(new GHierarchicalMapTypeControl());
map.addControl(new CTransparencyControl( overlay ));
""" % args # noqa
if self.kml:
s += """
map.addMapType(G_SATELLITE_3D_MAP);
map.getEarthInstance(getEarthInstanceCB);
"""
s += """
map.enableContinuousZoom();
map.enableScrollWheelZoom();
map.setMapType(G_HYBRID_MAP);
}
resize();
}
"""
if self.kml:
s += """
function getEarthInstanceCB(object) {
var ge = object;
if (ge) {
var url = document.location.toString();
url = url.substr(0,url.lastIndexOf('/'))+'/doc.kml';
var link = ge.createLink("");
if ("%(publishurl)s") { link.setHref("%(publishurl)s/doc.kml") }
else { link.setHref(url) };
var networkLink = ge.createNetworkLink("");
networkLink.setName("TMS Map Overlay");
networkLink.setFlyToView(true);
networkLink.setLink(link);
ge.getFeatures().appendChild(networkLink);
} else {
// alert("You should open a KML in Google Earth");
// add div with the link to generated KML... - maybe JavaScript redirect to the URL of KML?
}
}
""" % args # noqa
s += """
onresize=function(){ resize(); };
//]]>
</script>
</head>
<body onload="load()">
<div id="header"><h1>%(title)s</h1></div>
<div id="subheader">Generated by <a href="http://www.klokan.cz/projects/gdal2tiles/">GDAL2Tiles</a>, Copyright © 2008 <a href="http://www.klokan.cz/">Klokan Petr Pridal</a>, <a href="http://www.gdal.org/">GDAL</a> & <a href="http://www.osgeo.org/">OSGeo</a> <a href="http://code.google.com/soc/">GSoC</a>
<!-- PLEASE, LET THIS NOTE ABOUT AUTHOR AND PROJECT SOMEWHERE ON YOUR WEBSITE, OR AT LEAST IN THE COMMENT IN HTML. THANK YOU -->
</div>
<div id="map"></div>
</body>
</html>
""" % args # noqa
return s |
Python | def generate_leaflet(self):
"""
Template for leaflet.html implementing overlay of tiles for 'mercator' profile.
It returns filled string. Expected variables:
title, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl
"""
args = {}
args['title'] = self.options.title.replace('"', '\\"')
args['htmltitle'] = self.options.title
args['south'], args['west'], args['north'], args['east'] = self.swne
args['centerlon'] = (args['north'] + args['south']) / 2.
args['centerlat'] = (args['west'] + args['east']) / 2.
args['minzoom'] = self.tminz
args['maxzoom'] = self.tmaxz
args['beginzoom'] = self.tmaxz
args['tilesize'] = self.tilesize # not used
args['tileformat'] = self.tileext
args['publishurl'] = self.options.url # not used
args['copyright'] = self.options.copyright.replace('"', '\\"')
s = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name='viewport' content='width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no' />
<title>%(htmltitle)s</title>
<!-- Leaflet -->
<link rel="stylesheet" href="http://cdn.leafletjs.com/leaflet-0.7.5/leaflet.css" />
<script src="http://cdn.leafletjs.com/leaflet-0.7.5/leaflet.js"></script>
<style>
body { margin:0; padding:0; }
body, table, tr, td, th, div, h1, h2, input { font-family: "Calibri", "Trebuchet MS", "Ubuntu", Serif; font-size: 11pt; }
#map { position:absolute; top:0; bottom:0; width:100%%; } /* full size */
.ctl {
padding: 2px 10px 2px 10px;
background: white;
background: rgba(255,255,255,0.9);
box-shadow: 0 0 15px rgba(0,0,0,0.2);
border-radius: 5px;
text-align: right;
}
.title {
font-size: 18pt;
font-weight: bold;
}
.src {
font-size: 10pt;
}
</style>
</head>
<body>
<div id="map"></div>
<script>
/* **** Leaflet **** */
// Base layers
// .. OpenStreetMap
var osm = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'});
// .. CartoDB Positron
var cartodb = L.tileLayer('http://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png', {attribution: '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, © <a href="http://cartodb.com/attributions">CartoDB</a>'});
// .. OSM Toner
var toner = L.tileLayer('http://{s}.tile.stamen.com/toner/{z}/{x}/{y}.png', {attribution: 'Map tiles by <a href="http://stamen.com">Stamen Design</a>, under <a href="http://creativecommons.org/licenses/by/3.0">CC BY 3.0</a>. Data by <a href="http://openstreetmap.org">OpenStreetMap</a>, under <a href="http://www.openstreetmap.org/copyright">ODbL</a>.'});
// .. White background
var white = L.tileLayer("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAQAAAAEAAQMAAABmvDolAAAAA1BMVEX///+nxBvIAAAAH0lEQVQYGe3BAQ0AAADCIPunfg43YAAAAAAAAAAA5wIhAAAB9aK9BAAAAABJRU5ErkJggg==");
// Overlay layers (TMS)
var lyr = L.tileLayer('./{z}/{x}/{y}.%(tileformat)s', {tms: true, opacity: 0.7, attribution: "%(copyright)s"});
// Map
var map = L.map('map', {
center: [%(centerlon)s, %(centerlat)s],
zoom: %(beginzoom)s,
minZoom: %(minzoom)s,
maxZoom: %(maxzoom)s,
layers: [osm]
});
var basemaps = {"OpenStreetMap": osm, "CartoDB Positron": cartodb, "Stamen Toner": toner, "Without background": white}
var overlaymaps = {"Layer": lyr}
// Title
var title = L.control();
title.onAdd = function(map) {
this._div = L.DomUtil.create('div', 'ctl title');
this.update();
return this._div;
};
title.update = function(props) {
this._div.innerHTML = "%(title)s";
};
title.addTo(map);
// Note
var src = 'Generated by <a href="http://www.klokan.cz/projects/gdal2tiles/">GDAL2Tiles</a>, Copyright © 2008 <a href="http://www.klokan.cz/">Klokan Petr Pridal</a>, <a href="http://www.gdal.org/">GDAL</a> & <a href="http://www.osgeo.org/">OSGeo</a> <a href="http://code.google.com/soc/">GSoC</a>';
var title = L.control({position: 'bottomleft'});
title.onAdd = function(map) {
this._div = L.DomUtil.create('div', 'ctl src');
this.update();
return this._div;
};
title.update = function(props) {
this._div.innerHTML = src;
};
title.addTo(map);
// Add base layers
L.control.layers(basemaps, overlaymaps, {collapsed: false}).addTo(map);
// Fit to overlay bounds (SW and NE points with (lat, lon))
map.fitBounds([[%(south)s, %(east)s], [%(north)s, %(west)s]]);
</script>
</body>
</html>
""" % args # noqa
return s | def generate_leaflet(self):
"""
Template for leaflet.html implementing overlay of tiles for 'mercator' profile.
It returns filled string. Expected variables:
title, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl
"""
args = {}
args['title'] = self.options.title.replace('"', '\\"')
args['htmltitle'] = self.options.title
args['south'], args['west'], args['north'], args['east'] = self.swne
args['centerlon'] = (args['north'] + args['south']) / 2.
args['centerlat'] = (args['west'] + args['east']) / 2.
args['minzoom'] = self.tminz
args['maxzoom'] = self.tmaxz
args['beginzoom'] = self.tmaxz
args['tilesize'] = self.tilesize # not used
args['tileformat'] = self.tileext
args['publishurl'] = self.options.url # not used
args['copyright'] = self.options.copyright.replace('"', '\\"')
s = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name='viewport' content='width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no' />
<title>%(htmltitle)s</title>
<!-- Leaflet -->
<link rel="stylesheet" href="http://cdn.leafletjs.com/leaflet-0.7.5/leaflet.css" />
<script src="http://cdn.leafletjs.com/leaflet-0.7.5/leaflet.js"></script>
<style>
body { margin:0; padding:0; }
body, table, tr, td, th, div, h1, h2, input { font-family: "Calibri", "Trebuchet MS", "Ubuntu", Serif; font-size: 11pt; }
#map { position:absolute; top:0; bottom:0; width:100%%; } /* full size */
.ctl {
padding: 2px 10px 2px 10px;
background: white;
background: rgba(255,255,255,0.9);
box-shadow: 0 0 15px rgba(0,0,0,0.2);
border-radius: 5px;
text-align: right;
}
.title {
font-size: 18pt;
font-weight: bold;
}
.src {
font-size: 10pt;
}
</style>
</head>
<body>
<div id="map"></div>
<script>
/* **** Leaflet **** */
// Base layers
// .. OpenStreetMap
var osm = L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'});
// .. CartoDB Positron
var cartodb = L.tileLayer('http://{s}.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png', {attribution: '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a> contributors, © <a href="http://cartodb.com/attributions">CartoDB</a>'});
// .. OSM Toner
var toner = L.tileLayer('http://{s}.tile.stamen.com/toner/{z}/{x}/{y}.png', {attribution: 'Map tiles by <a href="http://stamen.com">Stamen Design</a>, under <a href="http://creativecommons.org/licenses/by/3.0">CC BY 3.0</a>. Data by <a href="http://openstreetmap.org">OpenStreetMap</a>, under <a href="http://www.openstreetmap.org/copyright">ODbL</a>.'});
// .. White background
var white = L.tileLayer("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAQAAAAEAAQMAAABmvDolAAAAA1BMVEX///+nxBvIAAAAH0lEQVQYGe3BAQ0AAADCIPunfg43YAAAAAAAAAAA5wIhAAAB9aK9BAAAAABJRU5ErkJggg==");
// Overlay layers (TMS)
var lyr = L.tileLayer('./{z}/{x}/{y}.%(tileformat)s', {tms: true, opacity: 0.7, attribution: "%(copyright)s"});
// Map
var map = L.map('map', {
center: [%(centerlon)s, %(centerlat)s],
zoom: %(beginzoom)s,
minZoom: %(minzoom)s,
maxZoom: %(maxzoom)s,
layers: [osm]
});
var basemaps = {"OpenStreetMap": osm, "CartoDB Positron": cartodb, "Stamen Toner": toner, "Without background": white}
var overlaymaps = {"Layer": lyr}
// Title
var title = L.control();
title.onAdd = function(map) {
this._div = L.DomUtil.create('div', 'ctl title');
this.update();
return this._div;
};
title.update = function(props) {
this._div.innerHTML = "%(title)s";
};
title.addTo(map);
// Note
var src = 'Generated by <a href="http://www.klokan.cz/projects/gdal2tiles/">GDAL2Tiles</a>, Copyright © 2008 <a href="http://www.klokan.cz/">Klokan Petr Pridal</a>, <a href="http://www.gdal.org/">GDAL</a> & <a href="http://www.osgeo.org/">OSGeo</a> <a href="http://code.google.com/soc/">GSoC</a>';
var title = L.control({position: 'bottomleft'});
title.onAdd = function(map) {
this._div = L.DomUtil.create('div', 'ctl src');
this.update();
return this._div;
};
title.update = function(props) {
this._div.innerHTML = src;
};
title.addTo(map);
// Add base layers
L.control.layers(basemaps, overlaymaps, {collapsed: false}).addTo(map);
// Fit to overlay bounds (SW and NE points with (lat, lon))
map.fitBounds([[%(south)s, %(east)s], [%(north)s, %(west)s]]);
</script>
</body>
</html>
""" % args # noqa
return s |
Python | def generate_openlayers(self):
"""
Template for openlayers.html implementing overlay of available Spherical Mercator layers.
It returns filled string. Expected variables:
title, bingkey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl
"""
args = {}
args['title'] = self.options.title
args['bingkey'] = self.options.bingkey
args['south'], args['west'], args['north'], args['east'] = self.swne
args['minzoom'] = self.tminz
args['maxzoom'] = self.tmaxz
args['tilesize'] = self.tilesize
args['tileformat'] = self.tileext
args['publishurl'] = self.options.url
args['copyright'] = self.options.copyright
if self.options.tmscompatible:
args['tmsoffset'] = "-1"
else:
args['tmsoffset'] = ""
if self.options.profile == 'raster':
args['rasterzoomlevels'] = self.tmaxz + 1
args['rastermaxresolution'] = 2**(self.nativezoom) * self.out_gt[1]
s = r"""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml"
<head>
<title>%(title)s</title>
<meta http-equiv='imagetoolbar' content='no'/>
<style type="text/css"> v\:* {behavior:url(#default#VML);}
html, body { overflow: hidden; padding: 0; height: 100%%; width: 100%%; font-family: 'Lucida Grande',Geneva,Arial,Verdana,sans-serif; }
body { margin: 10px; background: #fff; }
h1 { margin: 0; padding: 6px; border:0; font-size: 20pt; }
#header { height: 43px; padding: 0; background-color: #eee; border: 1px solid #888; }
#subheader { height: 12px; text-align: right; font-size: 10px; color: #555;}
#map { height: 95%%; border: 1px solid #888; }
.olImageLoadError { display: none; }
.olControlLayerSwitcher .layersDiv { border-radius: 10px 0 0 10px; }
</style>""" % args # noqa
if self.options.profile == 'mercator':
s += """
<script src='http://maps.google.com/maps/api/js?sensor=false&v=3.7'></script>
""" % args
s += """
<script src="http://www.openlayers.org/api/2.12/OpenLayers.js"></script>
<script>
var map;
var mapBounds = new OpenLayers.Bounds( %(west)s, %(south)s, %(east)s, %(north)s);
var mapMinZoom = %(minzoom)s;
var mapMaxZoom = %(maxzoom)s;
var emptyTileURL = "http://www.maptiler.org/img/none.png";
OpenLayers.IMAGE_RELOAD_ATTEMPTS = 3;
function init(){""" % args
if self.options.profile == 'mercator':
s += """
var options = {
div: "map",
controls: [],
projection: "EPSG:3857",
displayProjection: new OpenLayers.Projection("EPSG:4326"),
numZoomLevels: 20
};
map = new OpenLayers.Map(options);
// Create Google Mercator layers
var gmap = new OpenLayers.Layer.Google("Google Streets",
{
type: google.maps.MapTypeId.ROADMAP,
sphericalMercator: true
});
var gsat = new OpenLayers.Layer.Google("Google Satellite",
{
type: google.maps.MapTypeId.SATELLITE,
sphericalMercator: true
});
var ghyb = new OpenLayers.Layer.Google("Google Hybrid",
{
type: google.maps.MapTypeId.HYBRID,
sphericalMercator: true
});
var gter = new OpenLayers.Layer.Google("Google Terrain",
{
type: google.maps.MapTypeId.TERRAIN,
sphericalMercator: true
});
// Create Bing layers
var broad = new OpenLayers.Layer.Bing({
name: "Bing Roads",
key: "%(bingkey)s",
type: "Road",
sphericalMercator: true
});
var baer = new OpenLayers.Layer.Bing({
name: "Bing Aerial",
key: "%(bingkey)s",
type: "Aerial",
sphericalMercator: true
});
var bhyb = new OpenLayers.Layer.Bing({
name: "Bing Hybrid",
key: "%(bingkey)s",
type: "AerialWithLabels",
sphericalMercator: true
});
// Create OSM layer
var osm = new OpenLayers.Layer.OSM("OpenStreetMap");
// create TMS Overlay layer
var tmsoverlay = new OpenLayers.Layer.TMS("TMS Overlay", "",
{
serviceVersion: '.',
layername: '.',
alpha: true,
type: '%(tileformat)s',
isBaseLayer: false,
getURL: getURL
});
if (OpenLayers.Util.alphaHack() == false) {
tmsoverlay.setOpacity(0.7);
}
map.addLayers([gmap, gsat, ghyb, gter,
broad, baer, bhyb,
osm, tmsoverlay]);
var switcherControl = new OpenLayers.Control.LayerSwitcher();
map.addControl(switcherControl);
switcherControl.maximizeControl();
map.zoomToExtent(mapBounds.transform(map.displayProjection, map.projection));
""" % args # noqa
elif self.options.profile == 'geodetic':
s += """
var options = {
div: "map",
controls: [],
projection: "EPSG:4326"
};
map = new OpenLayers.Map(options);
var wms = new OpenLayers.Layer.WMS("VMap0",
"http://tilecache.osgeo.org/wms-c/Basic.py?",
{
layers: 'basic',
format: 'image/png'
}
);
var tmsoverlay = new OpenLayers.Layer.TMS("TMS Overlay", "",
{
serviceVersion: '.',
layername: '.',
alpha: true,
type: '%(tileformat)s',
isBaseLayer: false,
getURL: getURL
});
if (OpenLayers.Util.alphaHack() == false) {
tmsoverlay.setOpacity(0.7);
}
map.addLayers([wms,tmsoverlay]);
var switcherControl = new OpenLayers.Control.LayerSwitcher();
map.addControl(switcherControl);
switcherControl.maximizeControl();
map.zoomToExtent(mapBounds);
""" % args # noqa
elif self.options.profile == 'raster':
s += """
var options = {
div: "map",
controls: [],
maxExtent: new OpenLayers.Bounds(%(west)s, %(south)s, %(east)s, %(north)s),
maxResolution: %(rastermaxresolution)f,
numZoomLevels: %(rasterzoomlevels)d
};
map = new OpenLayers.Map(options);
var layer = new OpenLayers.Layer.TMS("TMS Layer", "",
{
serviceVersion: '.',
layername: '.',
alpha: true,
type: '%(tileformat)s',
getURL: getURL
});
map.addLayer(layer);
map.zoomToExtent(mapBounds);
""" % args # noqa
s += """
map.addControls([new OpenLayers.Control.PanZoomBar(),
new OpenLayers.Control.Navigation(),
new OpenLayers.Control.MousePosition(),
new OpenLayers.Control.ArgParser(),
new OpenLayers.Control.Attribution()]);
}
""" % args
if self.options.profile == 'mercator':
s += """
function getURL(bounds) {
bounds = this.adjustBounds(bounds);
var res = this.getServerResolution();
var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w));
var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h));
var z = this.getServerZoom();
if (this.map.baseLayer.CLASS_NAME === 'OpenLayers.Layer.Bing') {
z+=1;
}
var path = this.serviceVersion + "/" + this.layername + "/" + z + "/" + x + "/" + y + "." + this.type;
var url = this.url;
if (OpenLayers.Util.isArray(url)) {
url = this.selectUrl(path, url);
}
if (mapBounds.intersectsBounds(bounds) && (z >= mapMinZoom) && (z <= mapMaxZoom)) {
return url + path;
} else {
return emptyTileURL;
}
}
""" % args # noqa
elif self.options.profile == 'geodetic':
s += """
function getURL(bounds) {
bounds = this.adjustBounds(bounds);
var res = this.getServerResolution();
var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w));
var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h));
var z = this.getServerZoom()%(tmsoffset)s;
var path = this.serviceVersion + "/" + this.layername + "/" + z + "/" + x + "/" + y + "." + this.type;
var url = this.url;
if (OpenLayers.Util.isArray(url)) {
url = this.selectUrl(path, url);
}
if (mapBounds.intersectsBounds(bounds) && (z >= mapMinZoom) && (z <= mapMaxZoom)) {
return url + path;
} else {
return emptyTileURL;
}
}
""" % args # noqa
elif self.options.profile == 'raster':
s += """
function getURL(bounds) {
bounds = this.adjustBounds(bounds);
var res = this.getServerResolution();
var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w));
var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h));
var z = this.getServerZoom();
var path = this.serviceVersion + "/" + this.layername + "/" + z + "/" + x + "/" + y + "." + this.type;
var url = this.url;
if (OpenLayers.Util.isArray(url)) {
url = this.selectUrl(path, url);
}
if (mapBounds.intersectsBounds(bounds) && (z >= mapMinZoom) && (z <= mapMaxZoom)) {
return url + path;
} else {
return emptyTileURL;
}
}
""" % args # noqa
s += """
function getWindowHeight() {
if (self.innerHeight) return self.innerHeight;
if (document.documentElement && document.documentElement.clientHeight)
return document.documentElement.clientHeight;
if (document.body) return document.body.clientHeight;
return 0;
}
function getWindowWidth() {
if (self.innerWidth) return self.innerWidth;
if (document.documentElement && document.documentElement.clientWidth)
return document.documentElement.clientWidth;
if (document.body) return document.body.clientWidth;
return 0;
}
function resize() {
var map = document.getElementById("map");
var header = document.getElementById("header");
var subheader = document.getElementById("subheader");
map.style.height = (getWindowHeight()-80) + "px";
map.style.width = (getWindowWidth()-20) + "px";
header.style.width = (getWindowWidth()-20) + "px";
subheader.style.width = (getWindowWidth()-20) + "px";
if (map.updateSize) { map.updateSize(); };
}
onresize=function(){ resize(); };
</script>
</head>
<body onload="init()">
<div id="header"><h1>%(title)s</h1></div>
<div id="subheader">Generated by <a href="http://www.klokan.cz/projects/gdal2tiles/">GDAL2Tiles</a>, Copyright © 2008 <a href="http://www.klokan.cz/">Klokan Petr Pridal</a>, <a href="http://www.gdal.org/">GDAL</a> & <a href="http://www.osgeo.org/">OSGeo</a> <a href="http://code.google.com/soc/">GSoC</a>
<!-- PLEASE, LET THIS NOTE ABOUT AUTHOR AND PROJECT SOMEWHERE ON YOUR WEBSITE, OR AT LEAST IN THE COMMENT IN HTML. THANK YOU -->
</div>
<div id="map"></div>
<script type="text/javascript" >resize()</script>
</body>
</html>""" % args # noqa
return s | def generate_openlayers(self):
"""
Template for openlayers.html implementing overlay of available Spherical Mercator layers.
It returns filled string. Expected variables:
title, bingkey, north, south, east, west, minzoom, maxzoom, tilesize, tileformat, publishurl
"""
args = {}
args['title'] = self.options.title
args['bingkey'] = self.options.bingkey
args['south'], args['west'], args['north'], args['east'] = self.swne
args['minzoom'] = self.tminz
args['maxzoom'] = self.tmaxz
args['tilesize'] = self.tilesize
args['tileformat'] = self.tileext
args['publishurl'] = self.options.url
args['copyright'] = self.options.copyright
if self.options.tmscompatible:
args['tmsoffset'] = "-1"
else:
args['tmsoffset'] = ""
if self.options.profile == 'raster':
args['rasterzoomlevels'] = self.tmaxz + 1
args['rastermaxresolution'] = 2**(self.nativezoom) * self.out_gt[1]
s = r"""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml"
<head>
<title>%(title)s</title>
<meta http-equiv='imagetoolbar' content='no'/>
<style type="text/css"> v\:* {behavior:url(#default#VML);}
html, body { overflow: hidden; padding: 0; height: 100%%; width: 100%%; font-family: 'Lucida Grande',Geneva,Arial,Verdana,sans-serif; }
body { margin: 10px; background: #fff; }
h1 { margin: 0; padding: 6px; border:0; font-size: 20pt; }
#header { height: 43px; padding: 0; background-color: #eee; border: 1px solid #888; }
#subheader { height: 12px; text-align: right; font-size: 10px; color: #555;}
#map { height: 95%%; border: 1px solid #888; }
.olImageLoadError { display: none; }
.olControlLayerSwitcher .layersDiv { border-radius: 10px 0 0 10px; }
</style>""" % args # noqa
if self.options.profile == 'mercator':
s += """
<script src='http://maps.google.com/maps/api/js?sensor=false&v=3.7'></script>
""" % args
s += """
<script src="http://www.openlayers.org/api/2.12/OpenLayers.js"></script>
<script>
var map;
var mapBounds = new OpenLayers.Bounds( %(west)s, %(south)s, %(east)s, %(north)s);
var mapMinZoom = %(minzoom)s;
var mapMaxZoom = %(maxzoom)s;
var emptyTileURL = "http://www.maptiler.org/img/none.png";
OpenLayers.IMAGE_RELOAD_ATTEMPTS = 3;
function init(){""" % args
if self.options.profile == 'mercator':
s += """
var options = {
div: "map",
controls: [],
projection: "EPSG:3857",
displayProjection: new OpenLayers.Projection("EPSG:4326"),
numZoomLevels: 20
};
map = new OpenLayers.Map(options);
// Create Google Mercator layers
var gmap = new OpenLayers.Layer.Google("Google Streets",
{
type: google.maps.MapTypeId.ROADMAP,
sphericalMercator: true
});
var gsat = new OpenLayers.Layer.Google("Google Satellite",
{
type: google.maps.MapTypeId.SATELLITE,
sphericalMercator: true
});
var ghyb = new OpenLayers.Layer.Google("Google Hybrid",
{
type: google.maps.MapTypeId.HYBRID,
sphericalMercator: true
});
var gter = new OpenLayers.Layer.Google("Google Terrain",
{
type: google.maps.MapTypeId.TERRAIN,
sphericalMercator: true
});
// Create Bing layers
var broad = new OpenLayers.Layer.Bing({
name: "Bing Roads",
key: "%(bingkey)s",
type: "Road",
sphericalMercator: true
});
var baer = new OpenLayers.Layer.Bing({
name: "Bing Aerial",
key: "%(bingkey)s",
type: "Aerial",
sphericalMercator: true
});
var bhyb = new OpenLayers.Layer.Bing({
name: "Bing Hybrid",
key: "%(bingkey)s",
type: "AerialWithLabels",
sphericalMercator: true
});
// Create OSM layer
var osm = new OpenLayers.Layer.OSM("OpenStreetMap");
// create TMS Overlay layer
var tmsoverlay = new OpenLayers.Layer.TMS("TMS Overlay", "",
{
serviceVersion: '.',
layername: '.',
alpha: true,
type: '%(tileformat)s',
isBaseLayer: false,
getURL: getURL
});
if (OpenLayers.Util.alphaHack() == false) {
tmsoverlay.setOpacity(0.7);
}
map.addLayers([gmap, gsat, ghyb, gter,
broad, baer, bhyb,
osm, tmsoverlay]);
var switcherControl = new OpenLayers.Control.LayerSwitcher();
map.addControl(switcherControl);
switcherControl.maximizeControl();
map.zoomToExtent(mapBounds.transform(map.displayProjection, map.projection));
""" % args # noqa
elif self.options.profile == 'geodetic':
s += """
var options = {
div: "map",
controls: [],
projection: "EPSG:4326"
};
map = new OpenLayers.Map(options);
var wms = new OpenLayers.Layer.WMS("VMap0",
"http://tilecache.osgeo.org/wms-c/Basic.py?",
{
layers: 'basic',
format: 'image/png'
}
);
var tmsoverlay = new OpenLayers.Layer.TMS("TMS Overlay", "",
{
serviceVersion: '.',
layername: '.',
alpha: true,
type: '%(tileformat)s',
isBaseLayer: false,
getURL: getURL
});
if (OpenLayers.Util.alphaHack() == false) {
tmsoverlay.setOpacity(0.7);
}
map.addLayers([wms,tmsoverlay]);
var switcherControl = new OpenLayers.Control.LayerSwitcher();
map.addControl(switcherControl);
switcherControl.maximizeControl();
map.zoomToExtent(mapBounds);
""" % args # noqa
elif self.options.profile == 'raster':
s += """
var options = {
div: "map",
controls: [],
maxExtent: new OpenLayers.Bounds(%(west)s, %(south)s, %(east)s, %(north)s),
maxResolution: %(rastermaxresolution)f,
numZoomLevels: %(rasterzoomlevels)d
};
map = new OpenLayers.Map(options);
var layer = new OpenLayers.Layer.TMS("TMS Layer", "",
{
serviceVersion: '.',
layername: '.',
alpha: true,
type: '%(tileformat)s',
getURL: getURL
});
map.addLayer(layer);
map.zoomToExtent(mapBounds);
""" % args # noqa
s += """
map.addControls([new OpenLayers.Control.PanZoomBar(),
new OpenLayers.Control.Navigation(),
new OpenLayers.Control.MousePosition(),
new OpenLayers.Control.ArgParser(),
new OpenLayers.Control.Attribution()]);
}
""" % args
if self.options.profile == 'mercator':
s += """
function getURL(bounds) {
bounds = this.adjustBounds(bounds);
var res = this.getServerResolution();
var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w));
var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h));
var z = this.getServerZoom();
if (this.map.baseLayer.CLASS_NAME === 'OpenLayers.Layer.Bing') {
z+=1;
}
var path = this.serviceVersion + "/" + this.layername + "/" + z + "/" + x + "/" + y + "." + this.type;
var url = this.url;
if (OpenLayers.Util.isArray(url)) {
url = this.selectUrl(path, url);
}
if (mapBounds.intersectsBounds(bounds) && (z >= mapMinZoom) && (z <= mapMaxZoom)) {
return url + path;
} else {
return emptyTileURL;
}
}
""" % args # noqa
elif self.options.profile == 'geodetic':
s += """
function getURL(bounds) {
bounds = this.adjustBounds(bounds);
var res = this.getServerResolution();
var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w));
var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h));
var z = this.getServerZoom()%(tmsoffset)s;
var path = this.serviceVersion + "/" + this.layername + "/" + z + "/" + x + "/" + y + "." + this.type;
var url = this.url;
if (OpenLayers.Util.isArray(url)) {
url = this.selectUrl(path, url);
}
if (mapBounds.intersectsBounds(bounds) && (z >= mapMinZoom) && (z <= mapMaxZoom)) {
return url + path;
} else {
return emptyTileURL;
}
}
""" % args # noqa
elif self.options.profile == 'raster':
s += """
function getURL(bounds) {
bounds = this.adjustBounds(bounds);
var res = this.getServerResolution();
var x = Math.round((bounds.left - this.tileOrigin.lon) / (res * this.tileSize.w));
var y = Math.round((bounds.bottom - this.tileOrigin.lat) / (res * this.tileSize.h));
var z = this.getServerZoom();
var path = this.serviceVersion + "/" + this.layername + "/" + z + "/" + x + "/" + y + "." + this.type;
var url = this.url;
if (OpenLayers.Util.isArray(url)) {
url = this.selectUrl(path, url);
}
if (mapBounds.intersectsBounds(bounds) && (z >= mapMinZoom) && (z <= mapMaxZoom)) {
return url + path;
} else {
return emptyTileURL;
}
}
""" % args # noqa
s += """
function getWindowHeight() {
if (self.innerHeight) return self.innerHeight;
if (document.documentElement && document.documentElement.clientHeight)
return document.documentElement.clientHeight;
if (document.body) return document.body.clientHeight;
return 0;
}
function getWindowWidth() {
if (self.innerWidth) return self.innerWidth;
if (document.documentElement && document.documentElement.clientWidth)
return document.documentElement.clientWidth;
if (document.body) return document.body.clientWidth;
return 0;
}
function resize() {
var map = document.getElementById("map");
var header = document.getElementById("header");
var subheader = document.getElementById("subheader");
map.style.height = (getWindowHeight()-80) + "px";
map.style.width = (getWindowWidth()-20) + "px";
header.style.width = (getWindowWidth()-20) + "px";
subheader.style.width = (getWindowWidth()-20) + "px";
if (map.updateSize) { map.updateSize(); };
}
onresize=function(){ resize(); };
</script>
</head>
<body onload="init()">
<div id="header"><h1>%(title)s</h1></div>
<div id="subheader">Generated by <a href="http://www.klokan.cz/projects/gdal2tiles/">GDAL2Tiles</a>, Copyright © 2008 <a href="http://www.klokan.cz/">Klokan Petr Pridal</a>, <a href="http://www.gdal.org/">GDAL</a> & <a href="http://www.osgeo.org/">OSGeo</a> <a href="http://code.google.com/soc/">GSoC</a>
<!-- PLEASE, LET THIS NOTE ABOUT AUTHOR AND PROJECT SOMEWHERE ON YOUR WEBSITE, OR AT LEAST IN THE COMMENT IN HTML. THANK YOU -->
</div>
<div id="map"></div>
<script type="text/javascript" >resize()</script>
</body>
</html>""" % args # noqa
return s |
Python | def single_threaded_tiling(input_file, output_folder, **options):
"""Generate tiles using single process.
Keep a single threaded version that stays clear of multiprocessing,
for platforms that would not support it
"""
options = process_options(input_file, output_folder, options)
if options.verbose:
print("Begin tiles details calc")
conf, tile_details = worker_tile_details(input_file, output_folder, options)
if options.verbose:
print("Tiles details calc complete.")
if not options.verbose and not options.quiet:
progress_bar = ProgressBar(len(tile_details))
progress_bar.start()
for tile_detail in tile_details:
create_base_tile(conf, tile_detail)
if not options.verbose and not options.quiet:
progress_bar.log_progress()
create_overview_tiles(conf, output_folder, options)
shutil.rmtree(os.path.dirname(conf.src_file)) | def single_threaded_tiling(input_file, output_folder, **options):
"""Generate tiles using single process.
Keep a single threaded version that stays clear of multiprocessing,
for platforms that would not support it
"""
options = process_options(input_file, output_folder, options)
if options.verbose:
print("Begin tiles details calc")
conf, tile_details = worker_tile_details(input_file, output_folder, options)
if options.verbose:
print("Tiles details calc complete.")
if not options.verbose and not options.quiet:
progress_bar = ProgressBar(len(tile_details))
progress_bar.start()
for tile_detail in tile_details:
create_base_tile(conf, tile_detail)
if not options.verbose and not options.quiet:
progress_bar.log_progress()
create_overview_tiles(conf, output_folder, options)
shutil.rmtree(os.path.dirname(conf.src_file)) |
Python | def multi_threaded_tiling(input_file, output_folder, **options):
"""Generate tiles with multi processing."""
options = process_options(input_file, output_folder, options)
nb_processes = options.nb_processes or 1
(conf_receiver, conf_sender) = Pipe(False)
if options.verbose:
print("Begin tiles details calc")
p = Process(target=worker_tile_details,
args=[input_file, output_folder, options],
kwargs={"send_pipe": conf_sender})
p.start()
# Make sure to consume the queue before joining. If the payload is too big, it won't be put in
# one go in the queue and therefore the sending process will never finish, waiting for space in
# the queue to send data
conf, tile_details = conf_receiver.recv()
p.join()
if options.verbose:
print("Tiles details calc complete.")
# Have to create the Queue through a multiprocessing.Manager to get a Queue Proxy,
# otherwise you can't pass it as a param in the method invoked by the pool...
manager = Manager()
queue = manager.Queue()
pool = Pool(processes=nb_processes)
# TODO: gbataille - check the confs for which each element is an array... one useless level?
# TODO: gbataille - assign an ID to each job for print in verbose mode "ReadRaster Extent ..."
# TODO: gbataille - check memory footprint and time on big image. are they opened x times
for tile_detail in tile_details:
pool.apply_async(create_base_tile, (conf, tile_detail), {"queue": queue})
if not options.verbose and not options.quiet:
p = Process(target=progress_printer_thread, args=[queue, len(tile_details)])
p.start()
pool.close()
pool.join() # Jobs finished
if not options.verbose and not options.quiet:
p.join() # Traces done
create_overview_tiles(conf, output_folder, options)
shutil.rmtree(os.path.dirname(conf.src_file)) | def multi_threaded_tiling(input_file, output_folder, **options):
"""Generate tiles with multi processing."""
options = process_options(input_file, output_folder, options)
nb_processes = options.nb_processes or 1
(conf_receiver, conf_sender) = Pipe(False)
if options.verbose:
print("Begin tiles details calc")
p = Process(target=worker_tile_details,
args=[input_file, output_folder, options],
kwargs={"send_pipe": conf_sender})
p.start()
# Make sure to consume the queue before joining. If the payload is too big, it won't be put in
# one go in the queue and therefore the sending process will never finish, waiting for space in
# the queue to send data
conf, tile_details = conf_receiver.recv()
p.join()
if options.verbose:
print("Tiles details calc complete.")
# Have to create the Queue through a multiprocessing.Manager to get a Queue Proxy,
# otherwise you can't pass it as a param in the method invoked by the pool...
manager = Manager()
queue = manager.Queue()
pool = Pool(processes=nb_processes)
# TODO: gbataille - check the confs for which each element is an array... one useless level?
# TODO: gbataille - assign an ID to each job for print in verbose mode "ReadRaster Extent ..."
# TODO: gbataille - check memory footprint and time on big image. are they opened x times
for tile_detail in tile_details:
pool.apply_async(create_base_tile, (conf, tile_detail), {"queue": queue})
if not options.verbose and not options.quiet:
p = Process(target=progress_printer_thread, args=[queue, len(tile_details)])
p.start()
pool.close()
pool.join() # Jobs finished
if not options.verbose and not options.quiet:
p.join() # Traces done
create_overview_tiles(conf, output_folder, options)
shutil.rmtree(os.path.dirname(conf.src_file)) |
Python | def generate_tiles(input_file, output_folder, **options):
"""Generate tiles from input file.
Arguments:
``input_file`` (str): Path to input file.
``output_folder`` (str): Path to output folder.
``options``: Tile generation options.
Options:
``profile`` (str): Tile cutting profile (mercator,geodetic,raster) - default
'mercator' (Google Maps compatible)
``resampling`` (str): Resampling method (average,near,bilinear,cubic,cubicsp
line,lanczos,antialias) - default 'average'
``s_srs``: The spatial reference system used for the source input data
``zoom``: Zoom levels to render; format: `[int min, int max]`,
`'min-max'` or `int/str zoomlevel`.
``tile_size`` (int): Size of tiles to render - default 256
``resume`` (bool): Resume mode. Generate only missing files.
``srcnodata``: NODATA transparency value to assign to the input data
``tmscompatible`` (bool): When using the geodetic profile, specifies the base
resolution as 0.703125 or 2 tiles at zoom level 0.
``verbose`` (bool): Print status messages to stdout
``kml`` (bool): Generate KML for Google Earth - default for 'geodetic'
profile and 'raster' in EPSG:4326. For a dataset with
different projection use with caution!
``url`` (str): URL address where the generated tiles are going to be published
``webviewer`` (str): Web viewer to generate (all,google,openlayers,none) -
default 'all'
``title`` (str): Title of the map
``copyright`` (str): Copyright for the map
``googlekey`` (str): Google Maps API key from
http://code.google.com/apis/maps/signup.html
``bingkey`` (str): Bing Maps API key from https://www.bingmapsportal.com/
``nb_processes``: Number of processes to use for tiling.
"""
if options:
nb_processes = options.get('nb_processes') or 1
else:
nb_processes = 1
if nb_processes == 1:
single_threaded_tiling(input_file, output_folder, **options)
else:
multi_threaded_tiling(input_file, output_folder, **options) | def generate_tiles(input_file, output_folder, **options):
"""Generate tiles from input file.
Arguments:
``input_file`` (str): Path to input file.
``output_folder`` (str): Path to output folder.
``options``: Tile generation options.
Options:
``profile`` (str): Tile cutting profile (mercator,geodetic,raster) - default
'mercator' (Google Maps compatible)
``resampling`` (str): Resampling method (average,near,bilinear,cubic,cubicsp
line,lanczos,antialias) - default 'average'
``s_srs``: The spatial reference system used for the source input data
``zoom``: Zoom levels to render; format: `[int min, int max]`,
`'min-max'` or `int/str zoomlevel`.
``tile_size`` (int): Size of tiles to render - default 256
``resume`` (bool): Resume mode. Generate only missing files.
``srcnodata``: NODATA transparency value to assign to the input data
``tmscompatible`` (bool): When using the geodetic profile, specifies the base
resolution as 0.703125 or 2 tiles at zoom level 0.
``verbose`` (bool): Print status messages to stdout
``kml`` (bool): Generate KML for Google Earth - default for 'geodetic'
profile and 'raster' in EPSG:4326. For a dataset with
different projection use with caution!
``url`` (str): URL address where the generated tiles are going to be published
``webviewer`` (str): Web viewer to generate (all,google,openlayers,none) -
default 'all'
``title`` (str): Title of the map
``copyright`` (str): Copyright for the map
``googlekey`` (str): Google Maps API key from
http://code.google.com/apis/maps/signup.html
``bingkey`` (str): Bing Maps API key from https://www.bingmapsportal.com/
``nb_processes``: Number of processes to use for tiling.
"""
if options:
nb_processes = options.get('nb_processes') or 1
else:
nb_processes = 1
if nb_processes == 1:
single_threaded_tiling(input_file, output_folder, **options)
else:
multi_threaded_tiling(input_file, output_folder, **options) |
Python | def withcode():
"""
demo for JSON with status code
"""
return jsonify('code'), 203 | def withcode():
"""
demo for JSON with status code
"""
return jsonify('code'), 203 |
Python | def withheader():
"""
demo for JSON with status code and header
"""
return jsonify('header'), 203, {'X': 233} | def withheader():
"""
demo for JSON with status code and header
"""
return jsonify('header'), 203, {'X': 233} |
Python | def update_config(self, **kwargs):
"""
Manually update config.
This function will be triggered when you register this library to Flask
instance, and configs in Flask.config['OPENAPI'] will be used to update.
"""
for key, value in kwargs.items():
setattr(self.config, key, value) | def update_config(self, **kwargs):
"""
Manually update config.
This function will be triggered when you register this library to Flask
instance, and configs in Flask.config['OPENAPI'] will be used to update.
"""
for key, value in kwargs.items():
setattr(self.config, key, value) |
Python | def _register_route(self):
"""
register doc blueprint to Flask app
"""
blueprint = Blueprint(
self.config.name,
__name__,
url_prefix=self.config.url_prefix,
template_folder=self.config.template_folder,
)
# docs
blueprint.add_url_rule(
self.config.endpoint,
self.config.name,
view_func=APIview().as_view(
self.config.name,
view_args=dict(config=self.config),
)
)
# docs/openapi.json
@blueprint.route(f'{self.config.endpoint}<filename>')
def jsonfile(filename):
if filename == self.config.filename:
return jsonify(self.spec)
abort(404)
self.app.register_blueprint(blueprint) | def _register_route(self):
"""
register doc blueprint to Flask app
"""
blueprint = Blueprint(
self.config.name,
__name__,
url_prefix=self.config.url_prefix,
template_folder=self.config.template_folder,
)
# docs
blueprint.add_url_rule(
self.config.endpoint,
self.config.name,
view_func=APIview().as_view(
self.config.name,
view_args=dict(config=self.config),
)
)
# docs/openapi.json
@blueprint.route(f'{self.config.endpoint}<filename>')
def jsonfile(filename):
if filename == self.config.filename:
return jsonify(self.spec)
abort(404)
self.app.register_blueprint(blueprint) |
Python | def spec(self):
"""
Get OpenAPI spec for this Flask app.
"""
if self.config._spec is None:
self._generate_spec()
return self.config._spec | def spec(self):
"""
Get OpenAPI spec for this Flask app.
"""
if self.config._spec is None:
self._generate_spec()
return self.config._spec |
Python | def convert_any(*args, **kwargs):
"""
Handle converter type "any"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'array',
'items': {
'type': 'string',
'enum': args,
}
}
return schema | def convert_any(*args, **kwargs):
"""
Handle converter type "any"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'array',
'items': {
'type': 'string',
'enum': args,
}
}
return schema |
Python | def convert_int(*args, **kwargs):
"""
Handle converter type "int"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'integer',
'format': 'int32',
}
if 'max' in kwargs:
schema['maximum'] = kwargs['max']
if 'min' in kwargs:
schema['minimum'] = kwargs['min']
return schema | def convert_int(*args, **kwargs):
"""
Handle converter type "int"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'integer',
'format': 'int32',
}
if 'max' in kwargs:
schema['maximum'] = kwargs['max']
if 'min' in kwargs:
schema['minimum'] = kwargs['min']
return schema |
Python | def convert_float(*args, **kwargs):
"""
Handle converter type "float"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'number',
'format': 'float',
}
return schema | def convert_float(*args, **kwargs):
"""
Handle converter type "float"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'number',
'format': 'float',
}
return schema |
Python | def convert_uuid(*args, **kwargs):
"""
Handle converter type "uuid"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'string',
'format': 'uuid',
}
return schema | def convert_uuid(*args, **kwargs):
"""
Handle converter type "uuid"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'string',
'format': 'uuid',
}
return schema |
Python | def convert_path(*args, **kwargs):
"""
Handle converter type "path"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'string',
'format': 'path',
}
return schema | def convert_path(*args, **kwargs):
"""
Handle converter type "path"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'string',
'format': 'path',
}
return schema |
Python | def convert_string(*args, **kwargs):
"""
Handle converter type "string"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'string',
}
for prop in ['length', 'maxLength', 'minLength']:
if prop in kwargs:
schema[prop] = kwargs[prop]
return schema | def convert_string(*args, **kwargs):
"""
Handle converter type "string"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {
'type': 'string',
}
for prop in ['length', 'maxLength', 'minLength']:
if prop in kwargs:
schema[prop] = kwargs[prop]
return schema |
Python | def convert_default(*args, **kwargs):
"""
Handle converter type "default"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {'type': 'string'}
return schema | def convert_default(*args, **kwargs):
"""
Handle converter type "default"
:param args:
:param kwargs:
:return: return schema dict
"""
schema = {'type': 'string'}
return schema |
Python | def fn2dict(params: str) -> Tuple[Dict, bool, Optional[str], Optional[str]]:
"""
Roughly the inverse function of "dict2fn".
:param params: string, filename to split up
:return:
"""
param_name_pattern = re.compile(r'(?:[a-zA-Z0-9]+_?)+=')
# Debugging outputs do have an additional prefix which we need to remove
if params.startswith("debug__"):
params = params.removeprefix("debug__")
is_debug = True
pref = "debug"
else:
# FIXME: we only support "debug" as prefix, here
is_debug = False
pref = None
# Get the file extension (assume that there is no "." in the filename that does NOT separate the extension)
_ext_idx = params.find(os.extsep)
if _ext_idx >= 0:
ext = params[(_ext_idx + 1):]
else:
ext = None
# Split the filename and extract the (key, value)-pairs
ks = [m.removesuffix("=") for m in param_name_pattern.findall(params)]
vs = [v.removesuffix("__") for v in param_name_pattern.split(params) if len(v) > 0]
assert len(ks) == len(vs)
# Construct the output dictionary
out = {}
for k, v in zip(ks, vs):
if is_bool(v):
out[k] = bool(v)
elif is_integer(v):
out[k] = int(v)
elif is_float(v):
out[k] = int(v)
else:
assert isinstance(v, str)
out[k] = v
return out, is_debug, pref, ext | def fn2dict(params: str) -> Tuple[Dict, bool, Optional[str], Optional[str]]:
"""
Roughly the inverse function of "dict2fn".
:param params: string, filename to split up
:return:
"""
param_name_pattern = re.compile(r'(?:[a-zA-Z0-9]+_?)+=')
# Debugging outputs do have an additional prefix which we need to remove
if params.startswith("debug__"):
params = params.removeprefix("debug__")
is_debug = True
pref = "debug"
else:
# FIXME: we only support "debug" as prefix, here
is_debug = False
pref = None
# Get the file extension (assume that there is no "." in the filename that does NOT separate the extension)
_ext_idx = params.find(os.extsep)
if _ext_idx >= 0:
ext = params[(_ext_idx + 1):]
else:
ext = None
# Split the filename and extract the (key, value)-pairs
ks = [m.removesuffix("=") for m in param_name_pattern.findall(params)]
vs = [v.removesuffix("__") for v in param_name_pattern.split(params) if len(v) > 0]
assert len(ks) == len(vs)
# Construct the output dictionary
out = {}
for k, v in zip(ks, vs):
if is_bool(v):
out[k] = bool(v)
elif is_integer(v):
out[k] = int(v)
elif is_float(v):
out[k] = int(v)
else:
assert isinstance(v, str)
out[k] = v
return out, is_debug, pref, ext |
Python | def load_topk__cand_set_info(setting: Dict, basedir: str = ".") -> pd.DataFrame:
"""
Load the Top-k accuracies in the "comparison" folder. These are the results for the comparison methods, i.e.
RT filtering, LogP scoring and RO score integration approaches.
"""
df = []
for ifn in sorted(glob.glob(
os.path.join(basedir, dict2fn(setting), dict2fn({"spl": "*"}, pref="cand_set_info", ext="tsv"))
)):
# Parse the actual parameters from the basename (the setting might contain wildcards)
params, _, _, _ = fn2dict(ifn.split(os.sep)[-2]) # /path/to/PARAMS/file.tsv --> PARAMS
# Read the top-k performance results
_df = pd.read_csv(ifn, sep="\t")
# Add the parameters to the dataframe
for k, v in params.items():
if k not in _df.columns:
_df[k] = v
# Add the evaluation split index
_df["eval_indx"] = int(
os.path.basename(ifn).removesuffix(os.extsep + "tsv").removeprefix("top_k__").split("=")[1]
)
df.append(_df)
df = pd.concat(df, ignore_index=True)
return df | def load_topk__cand_set_info(setting: Dict, basedir: str = ".") -> pd.DataFrame:
"""
Load the Top-k accuracies in the "comparison" folder. These are the results for the comparison methods, i.e.
RT filtering, LogP scoring and RO score integration approaches.
"""
df = []
for ifn in sorted(glob.glob(
os.path.join(basedir, dict2fn(setting), dict2fn({"spl": "*"}, pref="cand_set_info", ext="tsv"))
)):
# Parse the actual parameters from the basename (the setting might contain wildcards)
params, _, _, _ = fn2dict(ifn.split(os.sep)[-2]) # /path/to/PARAMS/file.tsv --> PARAMS
# Read the top-k performance results
_df = pd.read_csv(ifn, sep="\t")
# Add the parameters to the dataframe
for k, v in params.items():
if k not in _df.columns:
_df[k] = v
# Add the evaluation split index
_df["eval_indx"] = int(
os.path.basename(ifn).removesuffix(os.extsep + "tsv").removeprefix("top_k__").split("=")[1]
)
df.append(_df)
df = pd.concat(df, ignore_index=True)
return df |
Python | def _get_topk(x, k, method):
"""
Task: Pandas aggregation function to compute the top-k acc.
"""
out = 0.0
if method == "average":
for xi in x:
out += (np.mean(xi) <= k)
elif method == "csi":
for xi in x:
y = np.arange(xi[0], xi[1] + 1)
for yi in y:
if yi <= k:
out += (1.0 / len(y))
else:
raise ValueError("Invalid method: '%s'" % method)
# Get accuracy as percentages
out /= len(x)
out *= 100
return out | def _get_topk(x, k, method):
"""
Task: Pandas aggregation function to compute the top-k acc.
"""
out = 0.0
if method == "average":
for xi in x:
out += (np.mean(xi) <= k)
elif method == "csi":
for xi in x:
y = np.arange(xi[0], xi[1] + 1)
for yi in y:
if yi <= k:
out += (1.0 / len(y))
else:
raise ValueError("Invalid method: '%s'" % method)
# Get accuracy as percentages
out /= len(x)
out *= 100
return out |
Python | def _aggregate_and_filter_classyfire_classes(df, min_class_support, cf_level):
"""
Task: Group and aggregate the results by the ClassyFire class-level and determine the support for each class.
Then, remove all classes with too little support. Purpose is to get the "relevant" class and superclass
relationships to determine the colors and orders for the plotting.
"""
# We consider only unique molecular structures to compute the CF class support
tmp = df.drop_duplicates("correct_structure")
# Group by the ClassyFire level
tmp = tmp.groupby("classyfire_%s" % cf_level)
if cf_level == "class":
tmp = tmp.aggregate({
"molecule_identifier": lambda x: len(x),
"classyfire_superclass": lambda x: x.iloc[0]
})
elif cf_level == "superclass":
tmp = tmp.aggregate({
"molecule_identifier": lambda x: len(x),
"classyfire_class": lambda x: ",".join([xi for xi in x if not pd.isna(xi)])
})
else:
raise ValueError("Invalid ClassyFire level: '%s'" % cf_level)
tmp = tmp \
.rename({"molecule_identifier": "n_class_support"}, axis=1) \
.reset_index() \
.sort_values(by="classyfire_superclass")
return tmp[tmp["n_class_support"] >= min_class_support] | def _aggregate_and_filter_classyfire_classes(df, min_class_support, cf_level):
"""
Task: Group and aggregate the results by the ClassyFire class-level and determine the support for each class.
Then, remove all classes with too little support. Purpose is to get the "relevant" class and superclass
relationships to determine the colors and orders for the plotting.
"""
# We consider only unique molecular structures to compute the CF class support
tmp = df.drop_duplicates("correct_structure")
# Group by the ClassyFire level
tmp = tmp.groupby("classyfire_%s" % cf_level)
if cf_level == "class":
tmp = tmp.aggregate({
"molecule_identifier": lambda x: len(x),
"classyfire_superclass": lambda x: x.iloc[0]
})
elif cf_level == "superclass":
tmp = tmp.aggregate({
"molecule_identifier": lambda x: len(x),
"classyfire_class": lambda x: ",".join([xi for xi in x if not pd.isna(xi)])
})
else:
raise ValueError("Invalid ClassyFire level: '%s'" % cf_level)
tmp = tmp \
.rename({"molecule_identifier": "n_class_support"}, axis=1) \
.reset_index() \
.sort_values(by="classyfire_superclass")
return tmp[tmp["n_class_support"] >= min_class_support] |
Python | def table__top_k_acc_per_dataset_with_significance(
results: pd.DataFrame, p_level: float = 0.05, ks: Optional[List[int]] = None, top_k_method: str = "csi",
test: str = "ttest", decimals: int = 1
) -> pd.DataFrame:
"""
Function to generate the table comparing "Only MS" with "MS + RT". Test for significance is performed and indicated,
if "MS + RT" significantly outperforms "Only MS".
:param results: pd.DataFrame, results
:param p_level:
:param ks:
:param top_k_method:
:param test:
:return:
"""
if ks is None:
ks = [1, 5, 10, 20]
# Check that all needed columns are provide
for column in ["k", "top_k_method", "scoring_method", "dataset", "eval_indx", "top_k_acc"]:
if column not in results.columns:
raise ValueError("Column {} is missing from the data-frame {}".format(column, results.columns))
# Collect all "MS + RT" settings, e.g., "MS + RT" or ["MS + RT (global)", "MS + RT (local)"], ...
ms_p_rt_labels = results[results["scoring_method"] != "Only MS"]["scoring_method"].unique().tolist()
if len(ms_p_rt_labels) < 1:
raise ValueError(
"There must be at least one other scoring method other than 'Only MS': {}".format(
results["scoring_method"].unique().tolist()
)
)
# Subset results for the specified top-ks and the top-k determination method (casmi or csi)
_results = results[(results["k"].isin(ks)) & (results["top_k_method"] == top_k_method)]
results_out = pd.DataFrame()
for (k, ds), res in _results.groupby(["k", "dataset"]):
# Separate the "Only MS" setting
_only_ms = res[res["scoring_method"] == "Only MS"].sort_values(by="eval_indx")
# New row(s) for the output data-frame
_df = {
"k": k,
"dataset": ds,
"n_samples": len(_only_ms),
"scoring_method": ["Only MS"],
"top_k_acc": [np.mean(_only_ms["top_k_acc"]).item()],
"p_value": [1.0]
}
# Load the "MS + RT" for each label
for l in ms_p_rt_labels:
_ms_p_rt = res[res["scoring_method"] == l].sort_values(by="eval_indx")
_df["scoring_method"].append(l)
_df["top_k_acc"].append(np.mean(_ms_p_rt["top_k_acc"]).item())
if len(_ms_p_rt) <= 1:
# We need to have more than one value to perform a significance test
_p = np.nan
else:
# Perform the significance test
if test == "wilcoxon":
_p = wilcoxon(_only_ms["top_k_acc"], _ms_p_rt["top_k_acc"], alternative="less")[1]
elif test == "ttest":
_p = ttest_rel(_only_ms["top_k_acc"], _ms_p_rt["top_k_acc"], alternative="less")[1]
else:
raise ValueError("Invalid significance test: %s" % test)
_df["p_value"].append(_p)
# Convert to accuracy strings
_df["top_k_acc__as_labels"] = []
for idx, s in enumerate(_df["top_k_acc"]):
_is_best = (
np.round(_df["top_k_acc"][idx], decimals=decimals)
==
np.max(np.round(_df["top_k_acc"], decimals=decimals))
)
_is_sign = False if np.isnan(_df["p_value"][idx]) else (_df["p_value"][idx] <= p_level)
_lab = "{}".format(np.round(_df["top_k_acc"][idx], decimals=decimals))
if _is_best:
_lab = "| " + _lab
if _is_sign:
_lab = _lab + " *"
_df["top_k_acc__as_labels"].append(_lab)
results_out = pd.concat((results_out, pd.DataFrame(_df)), ignore_index=True)
return results_out | def table__top_k_acc_per_dataset_with_significance(
results: pd.DataFrame, p_level: float = 0.05, ks: Optional[List[int]] = None, top_k_method: str = "csi",
test: str = "ttest", decimals: int = 1
) -> pd.DataFrame:
"""
Function to generate the table comparing "Only MS" with "MS + RT". Test for significance is performed and indicated,
if "MS + RT" significantly outperforms "Only MS".
:param results: pd.DataFrame, results
:param p_level:
:param ks:
:param top_k_method:
:param test:
:return:
"""
if ks is None:
ks = [1, 5, 10, 20]
# Check that all needed columns are provide
for column in ["k", "top_k_method", "scoring_method", "dataset", "eval_indx", "top_k_acc"]:
if column not in results.columns:
raise ValueError("Column {} is missing from the data-frame {}".format(column, results.columns))
# Collect all "MS + RT" settings, e.g., "MS + RT" or ["MS + RT (global)", "MS + RT (local)"], ...
ms_p_rt_labels = results[results["scoring_method"] != "Only MS"]["scoring_method"].unique().tolist()
if len(ms_p_rt_labels) < 1:
raise ValueError(
"There must be at least one other scoring method other than 'Only MS': {}".format(
results["scoring_method"].unique().tolist()
)
)
# Subset results for the specified top-ks and the top-k determination method (casmi or csi)
_results = results[(results["k"].isin(ks)) & (results["top_k_method"] == top_k_method)]
results_out = pd.DataFrame()
for (k, ds), res in _results.groupby(["k", "dataset"]):
# Separate the "Only MS" setting
_only_ms = res[res["scoring_method"] == "Only MS"].sort_values(by="eval_indx")
# New row(s) for the output data-frame
_df = {
"k": k,
"dataset": ds,
"n_samples": len(_only_ms),
"scoring_method": ["Only MS"],
"top_k_acc": [np.mean(_only_ms["top_k_acc"]).item()],
"p_value": [1.0]
}
# Load the "MS + RT" for each label
for l in ms_p_rt_labels:
_ms_p_rt = res[res["scoring_method"] == l].sort_values(by="eval_indx")
_df["scoring_method"].append(l)
_df["top_k_acc"].append(np.mean(_ms_p_rt["top_k_acc"]).item())
if len(_ms_p_rt) <= 1:
# We need to have more than one value to perform a significance test
_p = np.nan
else:
# Perform the significance test
if test == "wilcoxon":
_p = wilcoxon(_only_ms["top_k_acc"], _ms_p_rt["top_k_acc"], alternative="less")[1]
elif test == "ttest":
_p = ttest_rel(_only_ms["top_k_acc"], _ms_p_rt["top_k_acc"], alternative="less")[1]
else:
raise ValueError("Invalid significance test: %s" % test)
_df["p_value"].append(_p)
# Convert to accuracy strings
_df["top_k_acc__as_labels"] = []
for idx, s in enumerate(_df["top_k_acc"]):
_is_best = (
np.round(_df["top_k_acc"][idx], decimals=decimals)
==
np.max(np.round(_df["top_k_acc"], decimals=decimals))
)
_is_sign = False if np.isnan(_df["p_value"][idx]) else (_df["p_value"][idx] <= p_level)
_lab = "{}".format(np.round(_df["top_k_acc"][idx], decimals=decimals))
if _is_best:
_lab = "| " + _lab
if _is_sign:
_lab = _lab + " *"
_df["top_k_acc__as_labels"].append(_lab)
results_out = pd.concat((results_out, pd.DataFrame(_df)), ignore_index=True)
return results_out |
Python | def fit(self, X, y=None):
"""
Fit the Bouwmeester feature selection based on the feature correlation
"""
# Find highly correlated features and keep only one feature
R = np.abs(np.corrcoef(X.T)) # Absolute correlation between features
G = nx.from_numpy_array(R > self.corr_threshold) # Graph connecting the highly correlated features
self.support_mask_ = np.zeros(X.shape[1], dtype=bool)
for cc in nx.connected_components(G):
# Keep one node / feature per group of correlated features
self.support_mask_[cc.pop()] = True
return self | def fit(self, X, y=None):
"""
Fit the Bouwmeester feature selection based on the feature correlation
"""
# Find highly correlated features and keep only one feature
R = np.abs(np.corrcoef(X.T)) # Absolute correlation between features
G = nx.from_numpy_array(R > self.corr_threshold) # Graph connecting the highly correlated features
self.support_mask_ = np.zeros(X.shape[1], dtype=bool)
for cc in nx.connected_components(G):
# Keep one node / feature per group of correlated features
self.support_mask_[cc.pop()] = True
return self |
Python | def _max_margin_wrapper(candidates, make_order_prob, D, random_state):
"""
Wrapper to compute the max-marginals in parallel
"""
return RandomTreeFactorGraph(
candidates, make_order_probs=make_order_prob, random_state=random_state, D=D,
remove_edges_with_zero_rt_diff=True
).max_product().get_max_marginals(normalize=True) | def _max_margin_wrapper(candidates, make_order_prob, D, random_state):
"""
Wrapper to compute the max-marginals in parallel
"""
return RandomTreeFactorGraph(
candidates, make_order_probs=make_order_prob, random_state=random_state, D=D,
remove_edges_with_zero_rt_diff=True
).max_product().get_max_marginals(normalize=True) |
Python | def relative_error(y_true: np.ndarray, y_pred: np.ndarray) -> np.ndarray:
"""
Function to compute the relative RT prediction error.
:param y_true: array-like, shape = (n_samples, ), true RTs
:param y_pred: array-like, shape = (n_samples, ), predicted RTs
:return: scalar, relative prediction error
"""
epsilon = np.finfo(np.float64).eps
return np.abs(y_pred - y_true) / np.maximum(np.abs(y_true), epsilon) | def relative_error(y_true: np.ndarray, y_pred: np.ndarray) -> np.ndarray:
"""
Function to compute the relative RT prediction error.
:param y_true: array-like, shape = (n_samples, ), true RTs
:param y_pred: array-like, shape = (n_samples, ), predicted RTs
:return: scalar, relative prediction error
"""
epsilon = np.finfo(np.float64).eps
return np.abs(y_pred - y_true) / np.maximum(np.abs(y_true), epsilon) |
Python | def filter_descriptors(l_rdkit_desc) -> List[Tuple[str, Callable]]:
"""
Only keep the descriptors used by Bouwmeester et al. (2019)
"""
return [(dname, dfun) for dname, dfun in l_rdkit_desc if dname in BOUWMEESTER_DESCRIPTOR_SET] | def filter_descriptors(l_rdkit_desc) -> List[Tuple[str, Callable]]:
"""
Only keep the descriptors used by Bouwmeester et al. (2019)
"""
return [(dname, dfun) for dname, dfun in l_rdkit_desc if dname in BOUWMEESTER_DESCRIPTOR_SET] |
Python | def combine_margins_and_get_top_k(ssvm_model_result_dirs: List[str], dataset: str, sample_idx: int, output_dir: str):
"""
Function to combine the marginals scores (mu) of the candidates predicted by different SSVM models.
:param ssvm_model_result_dirs: list of strings, directories containing the results for the different SSVM models.
:param dataset: string, identifier of the dataset
:param sample_idx: scalar, index of the random evaluation sample (MS-feature sequence) for which the margins should
be combined.
:param output_dir: string, output directory for the aggregated marginals and top-k accuracies.
:return:
"""
marginals_out__msplrt = None
marginals_out__onlyms = None
# Top-k accuracy performance
df_top_k = pd.DataFrame()
df_top_k_max_models = pd.DataFrame()
# We load the marginals associated with the different SSVM models in a random order
for i, idir in enumerate(np.random.RandomState(sample_idx).permutation(ssvm_model_result_dirs)):
ifn = os.path.join(idir, dict2fn({"spl": sample_idx}, pref="marginals", ext="pkl.gz"))
with gzip.open(ifn, "rb") as ifile:
# Load the marginals (dictionary also contains the Only MS scores)
_marginals__msplrt = pickle.load(ifile) # type: dict
# Extract the Only MS scores
_marginals__onlyms = extract_ms_score(_marginals__msplrt) # type: dict
assert _marginals__onlyms.keys() == _marginals__msplrt.keys()
# Aggregate the Only-MS scores (MS2 scorers) by their candidate aggregation identifier. For example, if the
# identifier is "inchikey1", than for all candidates with the same inchikey1 the highest MS2 score is chosen.
# The output candidate set only contains a unique set of candidate identifiers.
_marginals__onlyms = aggregate_candidates(
_marginals__onlyms, args.candidate_aggregation_identifier
) # type: dict
if i == 0:
# We use the first SSVM-model as reference
marginals_out__msplrt = _marginals__msplrt
marginals_out__onlyms = _marginals__onlyms
# For the marginals we need to construct a matrix with marginals scores in the row and columns corresponding
# to the SSVM-models
for s in marginals_out__msplrt:
marginals_out__msplrt[s]["score"] = marginals_out__msplrt[s]["score"][:, np.newaxis]
else:
# Combine the marginals
assert marginals_out__msplrt.keys() == _marginals__msplrt.keys()
assert marginals_out__onlyms.keys() == _marginals__onlyms.keys()
for s in _marginals__msplrt:
assert marginals_out__msplrt[s].keys() == _marginals__msplrt[s].keys()
assert marginals_out__onlyms[s].keys() == _marginals__onlyms[s].keys()
# Perform some sanity checks
for k in [
"spectrum_id", "correct_structure", "index_of_correct_structure", "label", "n_cand", "score"
]:
if k == "spectrum_id":
# Spectrum ID (=accession) needs to match
assert marginals_out__msplrt[s][k].get("spectrum_id") == _marginals__msplrt[s][k].get("spectrum_id")
assert marginals_out__onlyms[s][k].get("spectrum_id") == _marginals__onlyms[s][k].get("spectrum_id")
assert _marginals__msplrt[s][k].get("spectrum_id") == _marginals__onlyms[s][k].get("spectrum_id")
elif k == "score":
# Score should only be equal for Only-MS
assert np.all(marginals_out__onlyms[s][k] == _marginals__onlyms[s][k])
else:
assert np.all(marginals_out__msplrt[s][k] == _marginals__msplrt[s][k])
assert np.all(marginals_out__onlyms[s][k] == _marginals__onlyms[s][k])
# Add up the normalized marginals
assert np.allclose(1.0, np.max(_marginals__msplrt[s]["score"]))
assert np.allclose(1.0, np.max(_marginals__onlyms[s]["score"]))
marginals_out__msplrt[s]["score"] = np.hstack((marginals_out__msplrt[s]["score"], _marginals__msplrt[s]["score"][:, np.newaxis]))
assert marginals_out__msplrt[s]["score"].shape == (marginals_out__msplrt[s]["n_cand"], i + 1)
# Calculate the ranking performance
for km in ["csi"]: # could also use "casmi"
# Aggregated marginals
_marginals__msplrt = aggregate_candidates(aggregate_scores(marginals_out__msplrt), args.candidate_aggregation_identifier)
for s in _marginals__msplrt:
assert np.all(_marginals__msplrt[s]["label"] == marginals_out__onlyms[s]["label"])
# LC-MS2Struct performance
_tmp = get_topk_score_df(None, _marginals__msplrt, topk_method=km, scoring_method="MS + RT") \
.assign(n_models=(i + 1), eval_indx=sample_idx, dataset=dataset)
_tmp["top_k_acc"] = (_tmp["correct_leq_k"] / _tmp["seq_length"]) * 100
# Only-MS performance
_tmp_baseline = get_topk_score_df(None, marginals_out__onlyms, topk_method=km, scoring_method="Only MS") \
.assign(n_models=(i + 1), eval_indx=sample_idx, dataset=dataset)
_tmp_baseline["top_k_acc"] = (_tmp_baseline["correct_leq_k"] / _tmp_baseline["seq_length"]) * 100
df_top_k = pd.concat((df_top_k, _tmp, _tmp_baseline), ignore_index=True)
if i == (len(ssvm_model_result_dirs) - 1):
df_top_k_max_models = pd.concat((df_top_k_max_models, _tmp, _tmp_baseline), ignore_index=True)
# Write out the aggregated marginals if requested
if args.write_out_averaged_margins:
with gzip.open(
os.path.join(output_dir, dict2fn({"spl": sample_idx}, pref="marginals", ext="pkl.gz")), "wb"
) as ofile:
marginals_out__msplrt = aggregate_candidates(
aggregate_scores(marginals_out__msplrt), args.candidate_aggregation_identifier
) # Aggregate the max-marginal scores if the different SSVM models
# Add the Only MS scores again
for s in marginals_out__msplrt:
assert np.all(marginals_out__msplrt[s]["label"] == marginals_out__onlyms[s]["label"])
marginals_out__msplrt[s]["ms_score"] = marginals_out__onlyms[s]["score"]
# Write out the dictionary
pickle.dump(marginals_out__msplrt, ofile)
return df_top_k, df_top_k_max_models | def combine_margins_and_get_top_k(ssvm_model_result_dirs: List[str], dataset: str, sample_idx: int, output_dir: str):
"""
Function to combine the marginals scores (mu) of the candidates predicted by different SSVM models.
:param ssvm_model_result_dirs: list of strings, directories containing the results for the different SSVM models.
:param dataset: string, identifier of the dataset
:param sample_idx: scalar, index of the random evaluation sample (MS-feature sequence) for which the margins should
be combined.
:param output_dir: string, output directory for the aggregated marginals and top-k accuracies.
:return:
"""
marginals_out__msplrt = None
marginals_out__onlyms = None
# Top-k accuracy performance
df_top_k = pd.DataFrame()
df_top_k_max_models = pd.DataFrame()
# We load the marginals associated with the different SSVM models in a random order
for i, idir in enumerate(np.random.RandomState(sample_idx).permutation(ssvm_model_result_dirs)):
ifn = os.path.join(idir, dict2fn({"spl": sample_idx}, pref="marginals", ext="pkl.gz"))
with gzip.open(ifn, "rb") as ifile:
# Load the marginals (dictionary also contains the Only MS scores)
_marginals__msplrt = pickle.load(ifile) # type: dict
# Extract the Only MS scores
_marginals__onlyms = extract_ms_score(_marginals__msplrt) # type: dict
assert _marginals__onlyms.keys() == _marginals__msplrt.keys()
# Aggregate the Only-MS scores (MS2 scorers) by their candidate aggregation identifier. For example, if the
# identifier is "inchikey1", than for all candidates with the same inchikey1 the highest MS2 score is chosen.
# The output candidate set only contains a unique set of candidate identifiers.
_marginals__onlyms = aggregate_candidates(
_marginals__onlyms, args.candidate_aggregation_identifier
) # type: dict
if i == 0:
# We use the first SSVM-model as reference
marginals_out__msplrt = _marginals__msplrt
marginals_out__onlyms = _marginals__onlyms
# For the marginals we need to construct a matrix with marginals scores in the row and columns corresponding
# to the SSVM-models
for s in marginals_out__msplrt:
marginals_out__msplrt[s]["score"] = marginals_out__msplrt[s]["score"][:, np.newaxis]
else:
# Combine the marginals
assert marginals_out__msplrt.keys() == _marginals__msplrt.keys()
assert marginals_out__onlyms.keys() == _marginals__onlyms.keys()
for s in _marginals__msplrt:
assert marginals_out__msplrt[s].keys() == _marginals__msplrt[s].keys()
assert marginals_out__onlyms[s].keys() == _marginals__onlyms[s].keys()
# Perform some sanity checks
for k in [
"spectrum_id", "correct_structure", "index_of_correct_structure", "label", "n_cand", "score"
]:
if k == "spectrum_id":
# Spectrum ID (=accession) needs to match
assert marginals_out__msplrt[s][k].get("spectrum_id") == _marginals__msplrt[s][k].get("spectrum_id")
assert marginals_out__onlyms[s][k].get("spectrum_id") == _marginals__onlyms[s][k].get("spectrum_id")
assert _marginals__msplrt[s][k].get("spectrum_id") == _marginals__onlyms[s][k].get("spectrum_id")
elif k == "score":
# Score should only be equal for Only-MS
assert np.all(marginals_out__onlyms[s][k] == _marginals__onlyms[s][k])
else:
assert np.all(marginals_out__msplrt[s][k] == _marginals__msplrt[s][k])
assert np.all(marginals_out__onlyms[s][k] == _marginals__onlyms[s][k])
# Add up the normalized marginals
assert np.allclose(1.0, np.max(_marginals__msplrt[s]["score"]))
assert np.allclose(1.0, np.max(_marginals__onlyms[s]["score"]))
marginals_out__msplrt[s]["score"] = np.hstack((marginals_out__msplrt[s]["score"], _marginals__msplrt[s]["score"][:, np.newaxis]))
assert marginals_out__msplrt[s]["score"].shape == (marginals_out__msplrt[s]["n_cand"], i + 1)
# Calculate the ranking performance
for km in ["csi"]: # could also use "casmi"
# Aggregated marginals
_marginals__msplrt = aggregate_candidates(aggregate_scores(marginals_out__msplrt), args.candidate_aggregation_identifier)
for s in _marginals__msplrt:
assert np.all(_marginals__msplrt[s]["label"] == marginals_out__onlyms[s]["label"])
# LC-MS2Struct performance
_tmp = get_topk_score_df(None, _marginals__msplrt, topk_method=km, scoring_method="MS + RT") \
.assign(n_models=(i + 1), eval_indx=sample_idx, dataset=dataset)
_tmp["top_k_acc"] = (_tmp["correct_leq_k"] / _tmp["seq_length"]) * 100
# Only-MS performance
_tmp_baseline = get_topk_score_df(None, marginals_out__onlyms, topk_method=km, scoring_method="Only MS") \
.assign(n_models=(i + 1), eval_indx=sample_idx, dataset=dataset)
_tmp_baseline["top_k_acc"] = (_tmp_baseline["correct_leq_k"] / _tmp_baseline["seq_length"]) * 100
df_top_k = pd.concat((df_top_k, _tmp, _tmp_baseline), ignore_index=True)
if i == (len(ssvm_model_result_dirs) - 1):
df_top_k_max_models = pd.concat((df_top_k_max_models, _tmp, _tmp_baseline), ignore_index=True)
# Write out the aggregated marginals if requested
if args.write_out_averaged_margins:
with gzip.open(
os.path.join(output_dir, dict2fn({"spl": sample_idx}, pref="marginals", ext="pkl.gz")), "wb"
) as ofile:
marginals_out__msplrt = aggregate_candidates(
aggregate_scores(marginals_out__msplrt), args.candidate_aggregation_identifier
) # Aggregate the max-marginal scores if the different SSVM models
# Add the Only MS scores again
for s in marginals_out__msplrt:
assert np.all(marginals_out__msplrt[s]["label"] == marginals_out__onlyms[s]["label"])
marginals_out__msplrt[s]["ms_score"] = marginals_out__onlyms[s]["score"]
# Write out the dictionary
pickle.dump(marginals_out__msplrt, ofile)
return df_top_k, df_top_k_max_models |
Python | def load_data(data_dir):
"""
Load image data from directory `data_dir`.
Assume `data_dir` has one directory named after each category, numbered
0 through NUM_CATEGORIES - 1. Inside each category directory will be some
number of image files.
Return tuple `(images, labels)`. `images` should be a list of all
of the images in the data directory, where each image is formatted as a
numpy ndarray with dimensions IMG_WIDTH x IMG_HEIGHT x 3. `labels` should
be a list of integer labels, representing the categories for each of the
corresponding `images`.
"""
raise NotImplementedError | def load_data(data_dir):
"""
Load image data from directory `data_dir`.
Assume `data_dir` has one directory named after each category, numbered
0 through NUM_CATEGORIES - 1. Inside each category directory will be some
number of image files.
Return tuple `(images, labels)`. `images` should be a list of all
of the images in the data directory, where each image is formatted as a
numpy ndarray with dimensions IMG_WIDTH x IMG_HEIGHT x 3. `labels` should
be a list of integer labels, representing the categories for each of the
corresponding `images`.
"""
raise NotImplementedError |
Python | def echo(callback, *args, **kwargs):
"""Echo args back to callback
For testing purposes only.
"""
callback(*args, **kwargs) | def echo(callback, *args, **kwargs):
"""Echo args back to callback
For testing purposes only.
"""
callback(*args, **kwargs) |
Python | def server_info(callback: collections.Callable) -> None:
"""Return information about the current running version of the server"""
from backend.server import RUN_DATE, VERSION
callback(start_date=RUN_DATE, version=VERSION) | def server_info(callback: collections.Callable) -> None:
"""Return information about the current running version of the server"""
from backend.server import RUN_DATE, VERSION
callback(start_date=RUN_DATE, version=VERSION) |
Python | def schedule_events(self, events: list, location='last'):
"""Schedule events in the queue at `location`. Note the handler might not be currently processing.
location = 'immediately' | 'next up' | 'last'
"""
# hack
event_queue_was_empty = not self._event_queue
if location == 'immediately':
for event in events:
self._push_event(event)
elif location == 'next up':
self._event_queue = events + self._event_queue
elif location == 'last':
self._event_queue += events
# hack: this gets checked every time an event is scheduled
if self.processing and event_queue_was_empty and (location == 'next up' or location == 'last'):
self._push_next_event() | def schedule_events(self, events: list, location='last'):
"""Schedule events in the queue at `location`. Note the handler might not be currently processing.
location = 'immediately' | 'next up' | 'last'
"""
# hack
event_queue_was_empty = not self._event_queue
if location == 'immediately':
for event in events:
self._push_event(event)
elif location == 'next up':
self._event_queue = events + self._event_queue
elif location == 'last':
self._event_queue += events
# hack: this gets checked every time an event is scheduled
if self.processing and event_queue_was_empty and (location == 'next up' or location == 'last'):
self._push_next_event() |
Python | def _push_next_event(self):
"""Push the next event in the queue into the stack"""
if self._event_queue:
# retrieve next event on queue and push into stack
self._push_event(self._event_queue.pop(0))
else:
self.delegate.event_queue_did_empty(self) | def _push_next_event(self):
"""Push the next event in the queue into the stack"""
if self._event_queue:
# retrieve next event on queue and push into stack
self._push_event(self._event_queue.pop(0))
else:
self.delegate.event_queue_did_empty(self) |
Python | def _push_event(self, event):
"""Push event into stack and evoke it"""
self._event_stack.append(event)
event.evoke() | def _push_event(self, event):
"""Push event into stack and evoke it"""
self._event_stack.append(event)
event.evoke() |
Python | def describe(condition: float) -> str:
"""Describes a number between 0 and 1 using some funny words
A big number is interpreted as a good thing, and small bad.
"""
assert 0 <= condition <= 1
if condition == 1:
condition = .999
small_adverbs = ['a little bit', 'fairly', 'somewhat', 'mildly']
big_adverbs = ['very', 'extremely', 'horrifyingly']
good_adjectives = ['shiny', 'amazing', 'clean', 'well-kept', 'normal']
medium_adjectives = ['bad', 'broken', 'smelly', 'rusty', 'windy']
bad_adjectives = ['dilapidated', 'gross', 'infected', 'dangerous', 'terrible']
adverbs = [small_adverbs, big_adverbs]
adjectives = [bad_adjectives, medium_adjectives, good_adjectives]
# It has six levels of detail, so we need to get the value from 0-5:
number = int(math.floor(condition * 6.0))
if number > 4:
return random.choice(adverbs[number % 2]) + ' ' + random.choice(good_adjectives)
else:
return random.choice(adverbs[1 - (number % 2)]) + ' ' + random.choice(
adjectives[int(math.floor(number / 3.0))]) | def describe(condition: float) -> str:
"""Describes a number between 0 and 1 using some funny words
A big number is interpreted as a good thing, and small bad.
"""
assert 0 <= condition <= 1
if condition == 1:
condition = .999
small_adverbs = ['a little bit', 'fairly', 'somewhat', 'mildly']
big_adverbs = ['very', 'extremely', 'horrifyingly']
good_adjectives = ['shiny', 'amazing', 'clean', 'well-kept', 'normal']
medium_adjectives = ['bad', 'broken', 'smelly', 'rusty', 'windy']
bad_adjectives = ['dilapidated', 'gross', 'infected', 'dangerous', 'terrible']
adverbs = [small_adverbs, big_adverbs]
adjectives = [bad_adjectives, medium_adjectives, good_adjectives]
# It has six levels of detail, so we need to get the value from 0-5:
number = int(math.floor(condition * 6.0))
if number > 4:
return random.choice(adverbs[number % 2]) + ' ' + random.choice(good_adjectives)
else:
return random.choice(adverbs[1 - (number % 2)]) + ' ' + random.choice(
adjectives[int(math.floor(number / 3.0))]) |
Python | def _register_callback(self, callback):
"""Register a new callback object and return the `callback_id`"""
# generate callback id
new_callback_id = self._generate_callback_id()
self._callback_with_id[new_callback_id] = callback
return new_callback_id | def _register_callback(self, callback):
"""Register a new callback object and return the `callback_id`"""
# generate callback id
new_callback_id = self._generate_callback_id()
self._callback_with_id[new_callback_id] = callback
return new_callback_id |
Python | def _retrieve_callback(self, callback_id):
"""Return registered callback object with matching `callback_id`
An exception is raised if the callback cannot be found.
"""
try:
return self._callback_with_id.pop(callback_id)
except KeyError:
# todo: is there a more fitting type of error?
raise InvalidArgumentError(
"Cannot find callback object using callback_id",
method='handle_callback',
args=(callback_id,)
) | def _retrieve_callback(self, callback_id):
"""Return registered callback object with matching `callback_id`
An exception is raised if the callback cannot be found.
"""
try:
return self._callback_with_id.pop(callback_id)
except KeyError:
# todo: is there a more fitting type of error?
raise InvalidArgumentError(
"Cannot find callback object using callback_id",
method='handle_callback',
args=(callback_id,)
) |
Python | def _generate_callback(self, callback_id):
"""Create a wrapper object that, when called, will send a `handle_callback` message over the socket.
The kwargs passed to the wrapper object, along with the `callback_id` when creating the object, will be passed as arguments to the `handle_callback` message.
"""
def callback(**callback_args):
# create message
msg = {
'method': 'handle_callback',
'args': {
'callback_id': callback_id,
}
}
if callback_args:
msg['args']['callback_args'] = callback_args
# send message to client
# problem: what if delegate disconnects before callback?
self.write_message(json.dumps(msg))
return callback | def _generate_callback(self, callback_id):
"""Create a wrapper object that, when called, will send a `handle_callback` message over the socket.
The kwargs passed to the wrapper object, along with the `callback_id` when creating the object, will be passed as arguments to the `handle_callback` message.
"""
def callback(**callback_args):
# create message
msg = {
'method': 'handle_callback',
'args': {
'callback_id': callback_id,
}
}
if callback_args:
msg['args']['callback_args'] = callback_args
# send message to client
# problem: what if delegate disconnects before callback?
self.write_message(json.dumps(msg))
return callback |
Python | def sending(method):
"""A decorator that calls the method on the client by sending a message over websocket"""
# noinspection PyProtectedMember
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
# self should be the `delegate` object here
# call local (server-side) method first
method(self, *args, **kwargs)
# prepare dictionary of args
callargs = inspect.getcallargs(method, self, *args, **kwargs)
callargs.pop('self')
# if our method takes in a callback
if 'callback' in callargs:
# store callback object on server-side (and remove from message)
callback_id = self._message_handler._register_callback(callargs.pop('callback'))
# attach callback id to message
callargs['callback_id'] = callback_id
# create message
msg = {'method': method.__name__, 'args': callargs}
# send message to client
msg = json.dumps(msg)
self._message_handler.write_message(msg)
return wrapper | def sending(method):
"""A decorator that calls the method on the client by sending a message over websocket"""
# noinspection PyProtectedMember
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
# self should be the `delegate` object here
# call local (server-side) method first
method(self, *args, **kwargs)
# prepare dictionary of args
callargs = inspect.getcallargs(method, self, *args, **kwargs)
callargs.pop('self')
# if our method takes in a callback
if 'callback' in callargs:
# store callback object on server-side (and remove from message)
callback_id = self._message_handler._register_callback(callargs.pop('callback'))
# attach callback id to message
callargs['callback_id'] = callback_id
# create message
msg = {'method': method.__name__, 'args': callargs}
# send message to client
msg = json.dumps(msg)
self._message_handler.write_message(msg)
return wrapper |
Python | def forward(recipient):
"""Return a decorator that attempts to call a method with the same name on the recipient
The recipient can either be an object or the path to the object in string, in which case the object will be searched for each time the decorated method is called.
"""
# the decorator
def decorator(method):
@functools.wraps(method)
def wrapper(*args, **kwargs):
# if recipient is a path, attempt to get actual object
if isinstance(recipient, str):
path = recipient.split('.')
# retrieve recipient object
try:
# object will be looked for in the scope of `method`
r = inspect.getcallargs(method, *args, **kwargs)[path.pop(0)]
for attr in path:
r = getattr(r, attr)
except (NameError, AttributeError):
# re-raise for now
raise
else:
r = recipient
# call method on recipient
call_on(r, method.__name__, *args, **kwargs)
return wrapper
return decorator | def forward(recipient):
"""Return a decorator that attempts to call a method with the same name on the recipient
The recipient can either be an object or the path to the object in string, in which case the object will be searched for each time the decorated method is called.
"""
# the decorator
def decorator(method):
@functools.wraps(method)
def wrapper(*args, **kwargs):
# if recipient is a path, attempt to get actual object
if isinstance(recipient, str):
path = recipient.split('.')
# retrieve recipient object
try:
# object will be looked for in the scope of `method`
r = inspect.getcallargs(method, *args, **kwargs)[path.pop(0)]
for attr in path:
r = getattr(r, attr)
except (NameError, AttributeError):
# re-raise for now
raise
else:
r = recipient
# call method on recipient
call_on(r, method.__name__, *args, **kwargs)
return wrapper
return decorator |
Python | def call(method, *args, check_error=True, **kwargs):
"""Attempt to call the method
If a matching method cannot be found, an InvalidMethodError is raised. If the arguments are invalid, an InvalidArgumentError is raised. Setting the `check_error` option to False will suppress these errors.
"""
try:
if method and isinstance(method, collections.Callable):
try:
inspect.getcallargs(method, *args, **kwargs)
except TypeError as err:
raise InvalidArgumentError(
"Attempt to invoke %s resulted in a TypeError %s" % (method, err),
method=method,
args=args,
kwargs=kwargs.copy()
)
else:
return method(*args, **kwargs)
else:
raise InvalidMethodError(method=method)
except (InvalidMethodError, InvalidArgumentError):
if check_error:
raise | def call(method, *args, check_error=True, **kwargs):
"""Attempt to call the method
If a matching method cannot be found, an InvalidMethodError is raised. If the arguments are invalid, an InvalidArgumentError is raised. Setting the `check_error` option to False will suppress these errors.
"""
try:
if method and isinstance(method, collections.Callable):
try:
inspect.getcallargs(method, *args, **kwargs)
except TypeError as err:
raise InvalidArgumentError(
"Attempt to invoke %s resulted in a TypeError %s" % (method, err),
method=method,
args=args,
kwargs=kwargs.copy()
)
else:
return method(*args, **kwargs)
else:
raise InvalidMethodError(method=method)
except (InvalidMethodError, InvalidArgumentError):
if check_error:
raise |
Python | def call_on(recipient, method_name, *args, check_error=True, **kwargs):
"""Attempt to call a method with the same name on the recipient
If a matching method cannot be found, an InvalidMethodError is raised. If the arguments are invalid, an InvalidArgumentError is raised. Setting the `check_error` option to False will suppress these errors.
"""
method = getattr(recipient, method_name, None)
call(method, *args, check_error=check_error, **kwargs) | def call_on(recipient, method_name, *args, check_error=True, **kwargs):
"""Attempt to call a method with the same name on the recipient
If a matching method cannot be found, an InvalidMethodError is raised. If the arguments are invalid, an InvalidArgumentError is raised. Setting the `check_error` option to False will suppress these errors.
"""
method = getattr(recipient, method_name, None)
call(method, *args, check_error=check_error, **kwargs) |
Python | def _get_top_ranking_propoals(probs):
"""Get top ranking proposals by k-means"""
dev = probs.device
kmeans = KMeans(n_clusters=5).fit(probs.cpu().numpy())
high_score_label = np.argmax(kmeans.cluster_centers_)
index = np.where(kmeans.labels_ == high_score_label)[0]
if len(index) == 0:
index = np.array([np.argmax(probs)])
return torch.from_numpy(index).to(dev) | def _get_top_ranking_propoals(probs):
"""Get top ranking proposals by k-means"""
dev = probs.device
kmeans = KMeans(n_clusters=5).fit(probs.cpu().numpy())
high_score_label = np.argmax(kmeans.cluster_centers_)
index = np.where(kmeans.labels_ == high_score_label)[0]
if len(index) == 0:
index = np.array([np.argmax(probs)])
return torch.from_numpy(index).to(dev) |
Python | def _get_proposal_clusters(all_rois, proposals, im_labels, cls_prob):
"""Generate a random sample of RoIs comprising foreground and background
examples.
"""
num_images, num_classes = im_labels.shape
assert num_images == 1, 'batch size shoud be equal to 1'
# overlaps: (rois x gt_boxes)
gt_boxes = proposals['gt_boxes']
gt_labels = proposals['gt_classes']
gt_scores = proposals['gt_scores']
overlaps = ops.box_iou(all_rois.to(gt_boxes.device), gt_boxes)
max_overlaps, gt_assignment = overlaps.max(dim=1)
labels = gt_labels[gt_assignment, 0]
cls_loss_weights = gt_scores[gt_assignment, 0]
# Select foreground RoIs as those with >= FG_THRESH overlap
fg_inds = (max_overlaps >= 0.5).nonzero()[:,0]
# Select background RoIs as those with < FG_THRESH overlap
bg_inds = (max_overlaps < 0.5).nonzero()[:,0]
ig_inds = (max_overlaps < 0.1).nonzero()[:,0]
cls_loss_weights[ig_inds] = 0.0
labels[bg_inds] = 0
gt_assignment[bg_inds] = -1
img_cls_loss_weights = torch.zeros(gt_boxes.shape[0], dtype=cls_prob.dtype, device=cls_prob.device)
pc_probs = torch.zeros(gt_boxes.shape[0], dtype=cls_prob.dtype, device=cls_prob.device)
pc_labels = torch.zeros(gt_boxes.shape[0], dtype=torch.long, device=cls_prob.device)
pc_count = torch.zeros(gt_boxes.shape[0], dtype=torch.long, device=cls_prob.device)
for i in range(gt_boxes.shape[0]):
po_index = (gt_assignment == i).nonzero()[:,0]
img_cls_loss_weights[i] = torch.sum(cls_loss_weights[po_index])
pc_labels[i] = gt_labels[i, 0]
pc_count[i] = len(po_index)
pc_probs[i] = (cls_prob[po_index, pc_labels[i]]).mean()
return labels, cls_loss_weights, gt_assignment, pc_labels, pc_probs, pc_count, img_cls_loss_weights | def _get_proposal_clusters(all_rois, proposals, im_labels, cls_prob):
"""Generate a random sample of RoIs comprising foreground and background
examples.
"""
num_images, num_classes = im_labels.shape
assert num_images == 1, 'batch size shoud be equal to 1'
# overlaps: (rois x gt_boxes)
gt_boxes = proposals['gt_boxes']
gt_labels = proposals['gt_classes']
gt_scores = proposals['gt_scores']
overlaps = ops.box_iou(all_rois.to(gt_boxes.device), gt_boxes)
max_overlaps, gt_assignment = overlaps.max(dim=1)
labels = gt_labels[gt_assignment, 0]
cls_loss_weights = gt_scores[gt_assignment, 0]
# Select foreground RoIs as those with >= FG_THRESH overlap
fg_inds = (max_overlaps >= 0.5).nonzero()[:,0]
# Select background RoIs as those with < FG_THRESH overlap
bg_inds = (max_overlaps < 0.5).nonzero()[:,0]
ig_inds = (max_overlaps < 0.1).nonzero()[:,0]
cls_loss_weights[ig_inds] = 0.0
labels[bg_inds] = 0
gt_assignment[bg_inds] = -1
img_cls_loss_weights = torch.zeros(gt_boxes.shape[0], dtype=cls_prob.dtype, device=cls_prob.device)
pc_probs = torch.zeros(gt_boxes.shape[0], dtype=cls_prob.dtype, device=cls_prob.device)
pc_labels = torch.zeros(gt_boxes.shape[0], dtype=torch.long, device=cls_prob.device)
pc_count = torch.zeros(gt_boxes.shape[0], dtype=torch.long, device=cls_prob.device)
for i in range(gt_boxes.shape[0]):
po_index = (gt_assignment == i).nonzero()[:,0]
img_cls_loss_weights[i] = torch.sum(cls_loss_weights[po_index])
pc_labels[i] = gt_labels[i, 0]
pc_count[i] = len(po_index)
pc_probs[i] = (cls_prob[po_index, pc_labels[i]]).mean()
return labels, cls_loss_weights, gt_assignment, pc_labels, pc_probs, pc_count, img_cls_loss_weights |
Python | def build_optimizer(cfg, model: torch.nn.Module) -> torch.optim.Optimizer:
"""
Build an optimizer from config.
"""
params: List[Dict[str, Any]] = []
for key, value in model.named_parameters():
if not value.requires_grad:
print(f'{key} requires no grad')
continue
lr = cfg.SOLVER.BASE_LR
weight_decay = cfg.SOLVER.WEIGHT_DECAY
if key.endswith("norm.weight") or key.endswith("norm.bias"):
weight_decay = cfg.SOLVER.WEIGHT_DECAY_NORM
elif key.endswith(".bias"):
# NOTE: unlike Detectron v1, we now default BIAS_LR_FACTOR to 1.0
# and WEIGHT_DECAY_BIAS to WEIGHT_DECAY so that bias optimizer
# hyperparameters are by default exactly the same as for regular
# weights.
lr = cfg.SOLVER.BASE_LR * cfg.SOLVER.BIAS_LR_FACTOR
weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS
if 'refinement' in key:
lr = lr * cfg.SOLVER.REFINEMENT_LR_FACTOR
params += [{"params": [value], "lr": lr, "weight_decay": weight_decay}]
print(f'{key} | lr: {lr:6.04f}, weight_decay: {weight_decay:6.04f}')
solver_type = cfg.SOLVER.TYPE.lower()
if solver_type == 'sgd':
optimizer = torch.optim.SGD(params, lr, momentum=cfg.SOLVER.MOMENTUM)
elif solver_type == 'caffesgd':
from optim.caffesgd import CaffeSGD
optimizer = CaffeSGD(params, lr, momentum=cfg.SOLVER.MOMENTUM)
return optimizer | def build_optimizer(cfg, model: torch.nn.Module) -> torch.optim.Optimizer:
"""
Build an optimizer from config.
"""
params: List[Dict[str, Any]] = []
for key, value in model.named_parameters():
if not value.requires_grad:
print(f'{key} requires no grad')
continue
lr = cfg.SOLVER.BASE_LR
weight_decay = cfg.SOLVER.WEIGHT_DECAY
if key.endswith("norm.weight") or key.endswith("norm.bias"):
weight_decay = cfg.SOLVER.WEIGHT_DECAY_NORM
elif key.endswith(".bias"):
# NOTE: unlike Detectron v1, we now default BIAS_LR_FACTOR to 1.0
# and WEIGHT_DECAY_BIAS to WEIGHT_DECAY so that bias optimizer
# hyperparameters are by default exactly the same as for regular
# weights.
lr = cfg.SOLVER.BASE_LR * cfg.SOLVER.BIAS_LR_FACTOR
weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS
if 'refinement' in key:
lr = lr * cfg.SOLVER.REFINEMENT_LR_FACTOR
params += [{"params": [value], "lr": lr, "weight_decay": weight_decay}]
print(f'{key} | lr: {lr:6.04f}, weight_decay: {weight_decay:6.04f}')
solver_type = cfg.SOLVER.TYPE.lower()
if solver_type == 'sgd':
optimizer = torch.optim.SGD(params, lr, momentum=cfg.SOLVER.MOMENTUM)
elif solver_type == 'caffesgd':
from optim.caffesgd import CaffeSGD
optimizer = CaffeSGD(params, lr, momentum=cfg.SOLVER.MOMENTUM)
return optimizer |
Python | def build_lr_scheduler(cfg, optimizer):
"""
Build a LR scheduler from config.
"""
name = cfg.SOLVER.LR_SCHEDULER_NAME
if name == "WarmupMultiStepLR":
return WarmupMultiStepLR(
optimizer,
cfg.SOLVER.STEPS,
cfg.SOLVER.GAMMA,
warmup_factor=cfg.SOLVER.WARMUP_FACTOR,
warmup_iters=cfg.SOLVER.WARMUP_ITERS,
warmup_method=cfg.SOLVER.WARMUP_METHOD,
)
elif name == "WarmupCosineLR":
return WarmupCosineLR(
optimizer,
cfg.SOLVER.MAX_ITER,
warmup_factor=cfg.SOLVER.WARMUP_FACTOR,
warmup_iters=cfg.SOLVER.WARMUP_ITERS,
warmup_method=cfg.SOLVER.WARMUP_METHOD,
)
elif name == "CaffeLRScheduler":
from optim.caffesgd import CaffeLRScheduler
return CaffeLRScheduler(
optimizer,
cfg.SOLVER.STEPS,
cfg.SOLVER.GAMMA,
warmup_factor=cfg.SOLVER.WARMUP_FACTOR,
warmup_iters=cfg.SOLVER.WARMUP_ITERS,
warmup_method=cfg.SOLVER.WARMUP_METHOD,
)
else:
raise ValueError("Unknown LR scheduler: {}".format(name)) | def build_lr_scheduler(cfg, optimizer):
"""
Build a LR scheduler from config.
"""
name = cfg.SOLVER.LR_SCHEDULER_NAME
if name == "WarmupMultiStepLR":
return WarmupMultiStepLR(
optimizer,
cfg.SOLVER.STEPS,
cfg.SOLVER.GAMMA,
warmup_factor=cfg.SOLVER.WARMUP_FACTOR,
warmup_iters=cfg.SOLVER.WARMUP_ITERS,
warmup_method=cfg.SOLVER.WARMUP_METHOD,
)
elif name == "WarmupCosineLR":
return WarmupCosineLR(
optimizer,
cfg.SOLVER.MAX_ITER,
warmup_factor=cfg.SOLVER.WARMUP_FACTOR,
warmup_iters=cfg.SOLVER.WARMUP_ITERS,
warmup_method=cfg.SOLVER.WARMUP_METHOD,
)
elif name == "CaffeLRScheduler":
from optim.caffesgd import CaffeLRScheduler
return CaffeLRScheduler(
optimizer,
cfg.SOLVER.STEPS,
cfg.SOLVER.GAMMA,
warmup_factor=cfg.SOLVER.WARMUP_FACTOR,
warmup_iters=cfg.SOLVER.WARMUP_ITERS,
warmup_method=cfg.SOLVER.WARMUP_METHOD,
)
else:
raise ValueError("Unknown LR scheduler: {}".format(name)) |
Python | def initialize_parser():
"""For running from command line, initialize argparse with common args"""
ftypes = [
"png",
"jpg",
"jpeg",
"pdf",
"ps",
"eps",
"rgba",
"svg",
"tiff",
"tif",
"pgf",
"svgz",
"raw",
]
parser = argparse.ArgumentParser()
parser.add_argument(
"-s",
"--savefig",
action="store",
default=False,
choices=ftypes,
help="Save figure to a file",
)
parser.add_argument(
"-w",
"--waverange",
choices=["all", "nir", "mir"],
default="all",
help="Wavelength range to display",
)
return parser | def initialize_parser():
"""For running from command line, initialize argparse with common args"""
ftypes = [
"png",
"jpg",
"jpeg",
"pdf",
"ps",
"eps",
"rgba",
"svg",
"tiff",
"tif",
"pgf",
"svgz",
"raw",
]
parser = argparse.ArgumentParser()
parser.add_argument(
"-s",
"--savefig",
action="store",
default=False,
choices=ftypes,
help="Save figure to a file",
)
parser.add_argument(
"-w",
"--waverange",
choices=["all", "nir", "mir"],
default="all",
help="Wavelength range to display",
)
return parser |
Python | def create_npm_package_archive_build_file():
"""Creates the contents of a `BUILD.bazel` file for exposing NPM package tarballs
for the integration test packages configured in the constant.
The `BUILD.bazel` file contents are supposed to be placed into the `@npm//`
workspace top-level BUILD file. This is necessary because all files of a NPM
package are not accessible outside from the `@npm//` workspace.
"""
result = """load("@rules_pkg//:pkg.bzl", "pkg_tar")"""
for pkg in INTEGRATION_TEST_PACKAGES:
label_name = _get_archive_label_of_package(pkg)
last_segment = pkg.split("/")[-1]
result += """
pkg_tar(
name = "{label_name}",
srcs = ["//{name}:{last_segment}__all_files"],
extension = "tar.gz",
package_dir = "package/",
strip_prefix = "/external/npm/node_modules/{name}",
tags = ["manual"],
)""".format(name = pkg, label_name = label_name, last_segment = last_segment)
return result | def create_npm_package_archive_build_file():
"""Creates the contents of a `BUILD.bazel` file for exposing NPM package tarballs
for the integration test packages configured in the constant.
The `BUILD.bazel` file contents are supposed to be placed into the `@npm//`
workspace top-level BUILD file. This is necessary because all files of a NPM
package are not accessible outside from the `@npm//` workspace.
"""
result = """load("@rules_pkg//:pkg.bzl", "pkg_tar")"""
for pkg in INTEGRATION_TEST_PACKAGES:
label_name = _get_archive_label_of_package(pkg)
last_segment = pkg.split("/")[-1]
result += """
pkg_tar(
name = "{label_name}",
srcs = ["//{name}:{last_segment}__all_files"],
extension = "tar.gz",
package_dir = "package/",
strip_prefix = "/external/npm/node_modules/{name}",
tags = ["manual"],
)""".format(name = pkg, label_name = label_name, last_segment = last_segment)
return result |
Python | def run(self):
"""
.run() function is used to store returns to fetch from a join implementation
"""
if self._target is not None:
self._return = self._target(*self._args, **self._kwargs) | def run(self):
"""
.run() function is used to store returns to fetch from a join implementation
"""
if self._target is not None:
self._return = self._target(*self._args, **self._kwargs) |
Python | def join(self, *args):
"""
.join() function can be used to get returns from a join implementation
"""
threading.Thread.join(self, *args, timeout=-1)
return self._return | def join(self, *args):
"""
.join() function can be used to get returns from a join implementation
"""
threading.Thread.join(self, *args, timeout=-1)
return self._return |
Python | def join(self, *args):
"""
.join() function can be used to get returns from a join implementation
"""
multiprocessing.Process.join(self, *args, timeout=-1)
return self._return | def join(self, *args):
"""
.join() function can be used to get returns from a join implementation
"""
multiprocessing.Process.join(self, *args, timeout=-1)
return self._return |
Python | def runRAFT(fname_design, fname_turbine, fname_env):
'''
This the main function for running the raft model in standalone form, where inputs are contained in the specified input files.
'''
# open the design YAML file and parse it into a dictionary for passing to raft
with open(fname_design) as file:
design = yaml.load(file, Loader=yaml.FullLoader)
print("Loading file: "+fname_design)
print(f"'{design['name']}'")
depth = float(design['mooring']['water_depth'])
# now off potMod in the design dictionary to avoid BEM analysis
for mi in design['platform']['members']: mi['potMod'] = False
# set up frequency range
w = np.arange(0.05, 5, 0.05) # frequency range (to be set by modeling options yaml)
# read in turbine data and combine it in
# turbine = loadTurbineYAML(fname_turbine)
# design['turbine'].update(turbine)
# --- Create and run the model ---
model = raft.Model(design, w=w, depth=depth) # set up model
model.setEnv(Hs=8, Tp=12, V=10, Fthrust=float(design['turbine']['Fthrust'])) # set basic wave and wind info
model.calcSystemProps() # get all the setup calculations done within the model
model.solveEigen()
model.calcMooringAndOffsets() # calculate the offsets for the given loading
model.solveDynamics() # put everything together and iteratively solve the dynamic response
model.plot()
plt.show()
return model | def runRAFT(fname_design, fname_turbine, fname_env):
'''
This the main function for running the raft model in standalone form, where inputs are contained in the specified input files.
'''
# open the design YAML file and parse it into a dictionary for passing to raft
with open(fname_design) as file:
design = yaml.load(file, Loader=yaml.FullLoader)
print("Loading file: "+fname_design)
print(f"'{design['name']}'")
depth = float(design['mooring']['water_depth'])
# now off potMod in the design dictionary to avoid BEM analysis
for mi in design['platform']['members']: mi['potMod'] = False
# set up frequency range
w = np.arange(0.05, 5, 0.05) # frequency range (to be set by modeling options yaml)
# read in turbine data and combine it in
# turbine = loadTurbineYAML(fname_turbine)
# design['turbine'].update(turbine)
# --- Create and run the model ---
model = raft.Model(design, w=w, depth=depth) # set up model
model.setEnv(Hs=8, Tp=12, V=10, Fthrust=float(design['turbine']['Fthrust'])) # set basic wave and wind info
model.calcSystemProps() # get all the setup calculations done within the model
model.solveEigen()
model.calcMooringAndOffsets() # calculate the offsets for the given loading
model.solveDynamics() # put everything together and iteratively solve the dynamic response
model.plot()
plt.show()
return model |
Python | def translateForce3to6DOF(Fin, r):
'''Takes in a position vector and a force vector (applied at the positon), and calculates
the resulting 6-DOF force and moment vector.
:param array r: x,y,z coordinates at which force is acting [m]
:param array Fin: x,y,z components of force [N]
:return: the resulting force and moment vector
:rtype: array
'''
Fout = np.zeros(6, dtype=Fin.dtype) # initialize output vector as same dtype as input vector (to support both real and complex inputs)
Fout[:3] = Fin
Fout[3:] = np.cross(r, Fin)
return Fout | def translateForce3to6DOF(Fin, r):
'''Takes in a position vector and a force vector (applied at the positon), and calculates
the resulting 6-DOF force and moment vector.
:param array r: x,y,z coordinates at which force is acting [m]
:param array Fin: x,y,z components of force [N]
:return: the resulting force and moment vector
:rtype: array
'''
Fout = np.zeros(6, dtype=Fin.dtype) # initialize output vector as same dtype as input vector (to support both real and complex inputs)
Fout[:3] = Fin
Fout[3:] = np.cross(r, Fin)
return Fout |
Python | def transformForce(f_in, offset=[], orientation=[]):
'''Transform a size-3 or size-6 force from one reference frame to another
Parameters
----------
f_in : size 3 or 6 array
the input force vector or force and moment vector
offset : size-3 array
the x,y,z coordinates at which f_in is acting, relative to the reference frame at which the force and moment should be returned
orientation : size-3 array
The orientation of f_in relative to the reference frame of the results. If size 3: x,y,z Euler angles
describing the rotations around each axis (applied in order z, y, x). If 3-by-3, the rotation matrix.
'''
# input size checks
if not len(f_in) in [3,6]:
raise ValueError("f_in input must be size 3 or 6")
if not len(offset) in [0,3]:
raise ValueError("offset input if provided must be size 3")
# prep output
if len(f_in) == 6:
f = np.array(f_in)
elif len(f_in) == 3:
f = np.hstack([f_in, [0,0,0]])
# prep rotation matrix
if len(orientation) > 0:
rot = np.array(orientation)
if rot.shape == (3,):
rotMat = rotationMatrix(*rot)
elif rot.shape == (3,3):
rotMat = rot
else:
raise ValueError("orientation input if provided must be size 3 or 3-by-3")
# rotation
f_in2 = np.array(f_in)
if len(orientation) > 0:
f[:3] = np.matmul(rotMat, f_in2[:3])
if len(f_in) == 6:
f[3:] = np.matmul(rotMat, f_in2[3:])
# translation
if len(offset) > 0:
f[3:] += np.cross(offset, f[:3]) # add moment created by offsetting forces
return f | def transformForce(f_in, offset=[], orientation=[]):
'''Transform a size-3 or size-6 force from one reference frame to another
Parameters
----------
f_in : size 3 or 6 array
the input force vector or force and moment vector
offset : size-3 array
the x,y,z coordinates at which f_in is acting, relative to the reference frame at which the force and moment should be returned
orientation : size-3 array
The orientation of f_in relative to the reference frame of the results. If size 3: x,y,z Euler angles
describing the rotations around each axis (applied in order z, y, x). If 3-by-3, the rotation matrix.
'''
# input size checks
if not len(f_in) in [3,6]:
raise ValueError("f_in input must be size 3 or 6")
if not len(offset) in [0,3]:
raise ValueError("offset input if provided must be size 3")
# prep output
if len(f_in) == 6:
f = np.array(f_in)
elif len(f_in) == 3:
f = np.hstack([f_in, [0,0,0]])
# prep rotation matrix
if len(orientation) > 0:
rot = np.array(orientation)
if rot.shape == (3,):
rotMat = rotationMatrix(*rot)
elif rot.shape == (3,3):
rotMat = rot
else:
raise ValueError("orientation input if provided must be size 3 or 3-by-3")
# rotation
f_in2 = np.array(f_in)
if len(orientation) > 0:
f[:3] = np.matmul(rotMat, f_in2[:3])
if len(f_in) == 6:
f[3:] = np.matmul(rotMat, f_in2[3:])
# translation
if len(offset) > 0:
f[3:] += np.cross(offset, f[:3]) # add moment created by offsetting forces
return f |
Python | def translateMatrix3to6DOF(Min, r):
'''Transforms a 3x3 matrix to be about a translated reference point, resulting in a 6x6 matrix.'''
# sub-matrix definitions are accordint to | m J |
# | J^T I |
# note that the J term and I terms are zero in this case because the input is just a mass matrix (assumed to be about CG)
H = getH(r) # "anti-symmetric tensor components" from Sadeghi and Incecik
Mout = np.zeros([6,6]) #, dtype=complex)
# mass matrix [m'] = [m]
Mout[:3,:3] = Min
# product of inertia matrix [J'] = [m][H] + [J]
Mout[:3,3:] = np.matmul(Min, H)
Mout[3:,:3] = Mout[:3,3:].T
# moment of inertia matrix [I'] = [H][m][H]^T + [J]^T [H] + [H]^T [J] + [I]
Mout[3:,3:] = np.matmul(np.matmul(H,Min), H.T)
return Mout | def translateMatrix3to6DOF(Min, r):
'''Transforms a 3x3 matrix to be about a translated reference point, resulting in a 6x6 matrix.'''
# sub-matrix definitions are accordint to | m J |
# | J^T I |
# note that the J term and I terms are zero in this case because the input is just a mass matrix (assumed to be about CG)
H = getH(r) # "anti-symmetric tensor components" from Sadeghi and Incecik
Mout = np.zeros([6,6]) #, dtype=complex)
# mass matrix [m'] = [m]
Mout[:3,:3] = Min
# product of inertia matrix [J'] = [m][H] + [J]
Mout[:3,3:] = np.matmul(Min, H)
Mout[3:,:3] = Mout[:3,3:].T
# moment of inertia matrix [I'] = [H][m][H]^T + [J]^T [H] + [H]^T [J] + [I]
Mout[3:,3:] = np.matmul(np.matmul(H,Min), H.T)
return Mout |
Python | def translateMatrix6to6DOF(Min, r):
'''Transforms a 6x6 matrix to be about a translated reference point.
r is a vector that goes from where you want the reference point to be
to where the reference point currently is'''
# sub-matrix definitions are accordint to | m J |
# | J^T I |
H = getH(r) # "anti-symmetric tensor components" from Sadeghi and Incecik
Mout = np.zeros([6,6]) #, dtype=complex)
# mass matrix [m'] = [m]
Mout[:3,:3] = Min[:3,:3]
# product of inertia matrix [J'] = [m][H] + [J]
Mout[:3,3:] = np.matmul(Min[:3,:3], H) + Min[:3,3:]
Mout[3:,:3] = Mout[:3,3:].T
# moment of inertia matrix [I'] = [H][m][H]^T + [J]^T [H] + [H]^T [J] + [I]
Mout[3:,3:] = np.matmul(np.matmul(H,Min[:3,:3]), H.T) + np.matmul(Min[3:,:3], H) + np.matmul(H.T, Min[:3,3:]) + Min[3:,3:]
return Mout | def translateMatrix6to6DOF(Min, r):
'''Transforms a 6x6 matrix to be about a translated reference point.
r is a vector that goes from where you want the reference point to be
to where the reference point currently is'''
# sub-matrix definitions are accordint to | m J |
# | J^T I |
H = getH(r) # "anti-symmetric tensor components" from Sadeghi and Incecik
Mout = np.zeros([6,6]) #, dtype=complex)
# mass matrix [m'] = [m]
Mout[:3,:3] = Min[:3,:3]
# product of inertia matrix [J'] = [m][H] + [J]
Mout[:3,3:] = np.matmul(Min[:3,:3], H) + Min[:3,3:]
Mout[3:,:3] = Mout[:3,3:].T
# moment of inertia matrix [I'] = [H][m][H]^T + [J]^T [H] + [H]^T [J] + [I]
Mout[3:,3:] = np.matmul(np.matmul(H,Min[:3,:3]), H.T) + np.matmul(Min[3:,:3], H) + np.matmul(H.T, Min[:3,3:]) + Min[3:,3:]
return Mout |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.