Python
from matplotlib.collections import LineCollection


def plot_graph_route_pix(G, route, im=None, bbox=None, fig_height=6,
                         fig_width=None, margin=0.02, bgcolor='w',
                         axis_off=True, show=True, save=False, close=True,
                         file_format='png', filename='temp', dpi=300,
                         annotate=False, node_color='#999999', node_size=15,
                         node_alpha=1, node_edgecolor='none', node_zorder=1,
                         edge_color='#999999', edge_linewidth=1, edge_alpha=1,
                         edge_width_key='speed_mph', edge_width_mult=1. / 25,
                         use_geom=True, origin_point=None,
                         destination_point=None, route_color='r',
                         route_linewidth=4, route_alpha=0.5,
                         orig_dest_node_alpha=0.5, orig_dest_node_size=100,
                         orig_dest_node_color='r'):
    """
    Plot a route along a networkx spatial graph.

    Parameters
    ----------
    G : networkx multidigraph
    route : list
        the route as a list of nodes
    im : numpy array
        optional background image to plot the graph over
    bbox : tuple
        bounding box as north,south,east,west - if None will calculate from
        spatial extents of data. if passing a bbox, you probably also want to
        pass margin=0 to constrain it.
    fig_height : int
        matplotlib figure height in inches
    fig_width : int
        matplotlib figure width in inches
    margin : float
        relative margin around the figure
    axis_off : bool
        if True, turn off the matplotlib axis
    bgcolor : string
        the background color of the figure and axis
    show : bool
        if True, show the figure
    save : bool
        if True, save the figure as an image file to disk
    close : bool
        close the figure (only if show equals False) to prevent display
    file_format : string
        the format of the file to save (e.g., 'jpg', 'png', 'svg')
    filename : string
        the name of the file if saving
    dpi : int
        the resolution of the image file if saving
    annotate : bool
        if True, annotate the nodes in the figure
    node_color : string
        the color of the nodes
    node_size : int
        the size of the nodes
    node_alpha : float
        the opacity of the nodes
    node_edgecolor : string
        the color of the node's marker's border
    node_zorder : int
        zorder to plot nodes; edges are always 2, so make node_zorder 1 to
        plot nodes beneath them or 3 to plot nodes atop them
    edge_color : string
        the color of the edges' lines
    edge_linewidth : float
        the width of the edges' lines
    edge_alpha : float
        the opacity of the edges' lines
    edge_width_key : string
        edge attribute used to scale the edge line widths
    edge_width_mult : float
        multiplier applied to the edge_width_key attribute
    use_geom : bool
        if True, use the spatial geometry attribute of the edges to draw
        geographically accurate edges, rather than just lines straight from
        node to node
    origin_point : tuple
        optional, an origin (lat, lon) point to plot instead of the origin
        node
    destination_point : tuple
        optional, a destination (lat, lon) point to plot instead of the
        destination node
    route_color : string
        the color of the route
    route_linewidth : int
        the width of the route line
    route_alpha : float
        the opacity of the route line
    orig_dest_node_alpha : float
        the opacity of the origin and destination nodes
    orig_dest_node_size : int
        the size of the origin and destination nodes
    orig_dest_node_color : string
        the color of the origin and destination nodes (can be a string or a
        list of (origin_color, dest_color))

    Returns
    -------
    fig, ax : tuple
    """
    # plot the graph but not the route
    fig, ax = plot_graph_pix(
        G, im=im, bbox=bbox, fig_height=fig_height, fig_width=fig_width,
        margin=margin, axis_off=axis_off, bgcolor=bgcolor,
        show=False, save=False, close=False, filename=filename,
        default_dpi=dpi, annotate=annotate, node_color=node_color,
        node_size=node_size, node_alpha=node_alpha,
        node_edgecolor=node_edgecolor, node_zorder=node_zorder,
        edge_color=edge_color, edge_linewidth=edge_linewidth,
        edge_alpha=edge_alpha, edge_width_key=edge_width_key,
        edge_width_mult=edge_width_mult, use_geom=use_geom)

    # the origin and destination nodes are the first and last nodes in the route
    origin_node = route[0]
    destination_node = route[-1]

    if origin_point is None or destination_point is None:
        # if the caller didn't pass points, use the first and last nodes in
        # the route as origin/destination
        origin_destination_ys = (G.nodes[origin_node]['y_pix'],
                                 G.nodes[destination_node]['y_pix'])
        origin_destination_xs = (G.nodes[origin_node]['x_pix'],
                                 G.nodes[destination_node]['x_pix'])
    else:
        # otherwise, use the passed points as origin/destination
        origin_destination_xs = (origin_point[0], destination_point[0])
        origin_destination_ys = (origin_point[1], destination_point[1])

    # scatter the origin and destination points
    ax.scatter(origin_destination_xs, origin_destination_ys,
               s=orig_dest_node_size, c=orig_dest_node_color,
               alpha=orig_dest_node_alpha, edgecolor=node_edgecolor, zorder=4)

    # plot the route lines
    edge_nodes = list(zip(route[:-1], route[1:]))
    lines = []
    for u, v in edge_nodes:
        # if there are parallel edges, select the shortest in length
        data = min(G.get_edge_data(u, v).values(), key=lambda x: x['length'])

        # if it has a geometry attribute (i.e., a list of line segments)
        if 'geometry_pix' in data and use_geom:
            # add them to the list of lines to plot
            xs, ys = data['geometry_pix'].xy
            lines.append(list(zip(xs, ys)))
        else:
            # if it doesn't have a geometry attribute, the edge is a straight
            # line from node to node
            x1 = G.nodes[u]['x_pix']
            y1 = G.nodes[u]['y_pix']
            x2 = G.nodes[v]['x_pix']
            y2 = G.nodes[v]['y_pix']
            lines.append([(x1, y1), (x2, y2)])

    # add the lines to the axis as a LineCollection
    lc = LineCollection(lines, colors=route_color, linewidths=route_linewidth,
                        alpha=route_alpha, zorder=3)
    ax.add_collection(lc)

    # save and show the figure as specified
    fig, ax = save_and_show(fig, ax, save, show, close, filename,
                            file_format, dpi, axis_off)
    return fig, ax
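A hedged usage sketch (not from the source): it assumes a road graph whose nodes carry x_pix/y_pix attributes and whose edges carry geometry_pix and length, as the function above requires; the file paths are hypothetical.

import networkx as nx
import skimage.io

# hypothetical inputs: a pickled spatial graph and the image it was extracted from
G = nx.read_gpickle('AOI_2_Vegas_img100.gpickle')
im = skimage.io.imread('AOI_2_Vegas_img100.tif')

nodes = list(G.nodes())
route = nx.shortest_path(G, source=nodes[0], target=nodes[-1], weight='length')
fig, ax = plot_graph_route_pix(G, route, im=im, route_color='yellow',
                               save=True, show=False, filename='route_plot')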
Python
import os

import networkx as nx
import pandas as pd


def pkl_dir_to_wkt(pkl_dir, output_csv_path='',
                   weight_keys=['length', 'travel_time_s'], verbose=False):
    """Create submission WKT from a directory full of graph pickles."""
    wkt_list = []

    pkl_list = sorted([z for z in os.listdir(pkl_dir) if z.endswith('.gpickle')])
    for i, pkl_name in enumerate(pkl_list):
        G = nx.read_gpickle(os.path.join(pkl_dir, pkl_name))
        print(i, "/", len(pkl_list), "num G.nodes:", len(G.nodes()))
        name_root = pkl_name.replace('PS-RGB_', '').replace('PS-MS_', '').split('.')[0]
        print("name_root:", name_root)

        # if the graph is empty, still add a placeholder row to the submission
        if len(G.nodes()) == 0:
            wkt_item_root = [name_root, 'LINESTRING EMPTY']
            if len(weight_keys) > 0:
                weights = [0 for w in weight_keys]
                wkt_list.append(wkt_item_root + weights)
            else:
                wkt_list.append(wkt_item_root)

        # extract the pixel geometry WKT for each edge and save it to the list
        seen_edges = set([])
        for j, (u, v, attr_dict) in enumerate(G.edges(data=True)):
            # make sure we haven't already seen this edge
            if (u, v) in seen_edges or (v, u) in seen_edges:
                print(u, v, "already catalogued!")
                continue
            else:
                seen_edges.add((u, v))
                seen_edges.add((v, u))
            geom_pix_wkt = attr_dict['geometry_pix'].wkt

            # check edge length
            if attr_dict['length'] > 5000:
                print("Edge too long!, u,v,data:", u, v, attr_dict)
                return

            if verbose:
                print(j, "/", len(G.edges()), "u, v:", u, v)
                print("  attr_dict:", attr_dict)
                print("  geom_pix_wkt:", geom_pix_wkt)

            wkt_item_root = [name_root, geom_pix_wkt]
            if len(weight_keys) > 0:
                weights = [attr_dict[w] for w in weight_keys]
                if verbose:
                    print("  weights:", weights)
                wkt_list.append(wkt_item_root + weights)
            else:
                wkt_list.append(wkt_item_root)

    if verbose:
        print("wkt_list:", wkt_list)

    # create the dataframe
    if len(weight_keys) > 0:
        cols = ['ImageId', 'WKT_Pix'] + weight_keys
    else:
        cols = ['ImageId', 'WKT_Pix']

    # use 'length_m' and 'travel_time_s' as the output column names
    cols_new = []
    for z in cols:
        if z == 'length':
            cols_new.append('length_m')
        elif z == 'travel_time':
            cols_new.append('travel_time_s')
        else:
            cols_new.append(z)
    cols = cols_new
    print("cols:", cols)

    df = pd.DataFrame(wkt_list, columns=cols)
    df.loc[:, 'ImageId'] = df.ImageId.apply(lambda name: name[name.find('AOI_'):])
    print("df:", df)

    # save
    if len(output_csv_path) > 0:
        df.to_csv(output_csv_path, index=False)
    return df
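A brief usage sketch (directory and output names are hypothetical): with the default weight_keys, the resulting columns are ImageId, WKT_Pix, length_m, and travel_time_s.

df = pkl_dir_to_wkt('results/graphs_speed',
                    output_csv_path='solution.csv',
                    weight_keys=['length', 'travel_time_s'])
print(df.columns.tolist())  # ['ImageId', 'WKT_Pix', 'length_m', 'travel_time_s']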
Python
import math

# bin_size_mph and channel_value_mult are module-level globals set elsewhere


def speed_to_burn_func(speed_mph):
    '''Convert a speed estimate to the appropriate channel
    (bin every 10 mph or so); bin = 0 if speed = 0.'''
    return int(int(math.ceil(speed_mph / bin_size_mph)) * channel_value_mult)

# determine num_channels
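The two globals are defined elsewhere in the module; a self-contained sketch with assumed values (bin_size_mph = 10, channel_value_mult = 1) makes the binning concrete:

import math

bin_size_mph = 10.0     # assumed bin width
channel_value_mult = 1  # assumed channel multiplier

def speed_to_burn_func(speed_mph):
    return int(int(math.ceil(speed_mph / bin_size_mph)) * channel_value_mult)

for mph in (0, 5, 10, 25, 65):
    print(mph, '->', speed_to_burn_func(mph))
# 0 -> 0, 5 -> 1, 10 -> 1, 25 -> 3, 65 -> 7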
Python
import random
import string

from six import text_type  # unicode on Python 2, str on Python 3

# default character set: all ascii letters, digits, and the symbols .,:-_
# (the original snippet's list default would fail its own text_type assert)
DEFAULT_CHARS = text_type(string.ascii_letters + string.digits + u'.,:-_')


def _random_password(length=20, chars=DEFAULT_CHARS):
    '''Return a random password string of length containing only chars

    :kwarg length: The number of characters in the new password.  Defaults
        to 20.
    :kwarg chars: The characters to choose from.  The default is all ascii
        letters, ascii digits, and these symbols ``.,:-_``
    '''
    assert isinstance(chars, text_type), '%s (%s) is not a text_type' % (chars, type(chars))

    random_generator = random.SystemRandom()

    password = []
    while len(password) < length:
        new_char = random_generator.choice(chars)
        password.append(new_char)

    return u''.join(password)
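Usage is straightforward; random.SystemRandom draws from the OS CSPRNG, so the output is suitable for secrets. Any text string of candidate characters works:

pw = _random_password()  # 20 characters from the default set
pw_hex = _random_password(length=32, chars=u'0123456789abcdef')
print(len(pw), len(pw_hex))  # 20 32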
Python
def meta_default(self):
    """
    Compiles and returns the default and common <meta> tags.
    """
    meta = {"title": self.get_title()}

    # get the description
    try:
        meta["description"] = self.soup.find(
            "meta", attrs={"name": "description"})["content"]
    except Exception:
        meta["description"] = ""

    # get the robots meta
    try:
        meta["robots"] = self.soup.find("meta", attrs={"name": "robots"})["content"]
    except Exception:
        pass

    # get the viewport
    try:
        meta["viewport"] = self.soup.find(
            "meta", attrs={"name": "viewport"})["content"]
    except Exception:
        pass

    # get the canonical tag
    try:
        meta["canonical"] = self.soup.find(
            "link", attrs={"rel": "canonical"})["href"]
    except Exception:
        pass

    # get the charset
    try:
        meta["charset"] = self.soup.find("meta", attrs={"charset": True})["charset"]
    except Exception:
        pass

    # return the basic meta tags
    return meta
Python
def meta_opengraph(self):
    """
    Gets and returns the <og:> meta tags.
    """
    # common opengraph properties
    og_properties = [
        "type",
        "title",
        "description",
        "image",
        "url",
        "site_name",
    ]

    opengraph = {}
    for i in og_properties:
        try:
            opengraph[i] = self.soup.find(
                "meta", attrs={"property": f"og:{i}"})["content"]
        except Exception:
            # if the property doesn't exist, do nothing
            pass

    # return the opengraph meta tags
    return opengraph
Python
def meta_twitter(self):
    """
    Gets and returns the <twitter:> meta tags.
    """
    # common twitter meta tags
    tw_properties = [
        "title",
        "description",
        "image",
        "site",
        "creator",
    ]

    twitter = {}
    for i in tw_properties:
        try:
            twitter[i] = self.soup.find(
                "meta", attrs={"name": f"twitter:{i}"})["content"]
        except Exception:
            # if the property doesn't exist, do nothing
            pass

    # return the twitter meta tags
    return twitter
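All three methods above share one pattern: find an attribute-matched tag, index its content, and swallow the exception when the tag is absent. A minimal self-contained sketch of that pattern with BeautifulSoup:

from bs4 import BeautifulSoup

html = '''<html><head>
<meta name="description" content="A test page">
<meta property="og:title" content="Test">
</head></html>'''
soup = BeautifulSoup(html, 'html.parser')

def find_meta(soup, attrs, default=''):
    '''Return the tag's content attribute, or default if tag/attr is missing.'''
    try:
        return soup.find('meta', attrs=attrs)['content']
    except Exception:
        return default

print(find_meta(soup, {'name': 'description'}))   # A test page
print(find_meta(soup, {'property': 'og:title'}))  # Test
print(find_meta(soup, {'name': 'twitter:card'}))  # '' (tag absent)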
Python
def _check_merkle_root(self, merkle_root):
    'Checks the merkle_root is correct. Internal use.'

    if merkle_root != self.merkle_root:
        raise InvalidBlockException('invalid merkle root')
Python
def _update_transactions(self, transactions):
    '''Update the database with the transaction count and attach the
    transactions to this Block instance. INTERNAL USE ONLY!!'''

    cursor = self.__database._cursor()
    cursor.execute('update blocks set txn_count = ? where id = ?',
                   (len(transactions), self._blockid))
    self.__database._connection.commit()

    self.__data['txns'] = transactions
    self.__data['txn_count'] = len(transactions)
Python
def add_header(self, header):
    '''Adds a block header to the database if it is not already present;
    returns True if it was added, False if it already existed.

    If a block's transactions is None, it means that only the block header
    is present in the database.'''

    # calculate the block hash
    binary_header = header.binary()[:80]
    block_hash = util.sha256d(binary_header)

    # already exists and nothing new
    existing = self.get(block_hash, orphans=True)
    if existing:
        return False

    # @TODO: Calculate the expected target and make sure the block matches
    # @TODO: Calculate the valid time range and make sure the block matches

    # verify the block hits the target
    if not util.verify_target(self.coin, header):
        raise InvalidBlockException('block proof-of-work is greater than target')

    # find the previous block
    previous_block = self.get(header.prev_block, orphans=True)
    if not previous_block:
        raise InvalidBlockException('previous block does not exist')

    cursor = self._cursor()
    cursor.execute('begin immediate transaction')

    # find the top block
    cursor.execute(self.sql_select + ' where mainchain = 1 order by height desc limit 1')
    top_block = Block(self, cursor.fetchone())

    height = previous_block.height + 1
    mainchain = bool(height > top_block.height)

    # are we building off of a sidechain that will become the mainchain?
    if mainchain and not previous_block.mainchain:

        # update all blocks from previous_block back to the fork as mainchain
        cur = previous_block
        while not cur.mainchain:
            cursor.execute('update blocks set mainchain = 1 where id = ?', (cur._blockid, ))
            cur = cur.previous_block
        forked_at = cur.hash

        # update all blocks from the old top (now orphan) back to the fork
        # as not mainchain
        cur = top_block
        while cur.hash != forked_at:
            cursor.execute('update blocks set mainchain = 0 where id = ?', (cur._blockid, ))
            cur = cur.previous_block

    # add the block to the database
    cursor = self._cursor()
    row = (previous_block._blockid, buffer(block_hash), header.version,
           buffer(header.merkle_root), header.timestamp, header.bits,
           header.nonce, height, 0, mainchain)
    cursor.execute(self.sql_insert, row)
    self._connection.commit()

    return True
Python
def _get(self, blockid):
    'Return a block for a blockid. Internal use only.'

    cursor = self._cursor()
    cursor.execute(self.sql_select + ' where id = ?', (blockid, ))
    row = cursor.fetchone()
    if row:
        return Block(self, row)
    return None
Python
def block_locator_hashes(self):
    'Return a list of hashes suitable as a block locator hash.'

    hashes = []

    # first the 10 most recent mainchain blocks...
    offset = 0
    cursor = self._cursor()
    cursor.execute('select hash, height from blocks where mainchain = 1 and height > 0 order by height desc limit 10')
    rows = cursor.fetchall()
    # (the list comprehension's loop variable leaks in Python 2, so offset
    # is left at the lowest height fetched)
    hashes.extend([str(hash) for (hash, offset) in rows])
    offset -= 1

    # ...then step down by twice the previous step...
    if offset > 0:
        for i in xrange(1, int(math.log(2 * offset, 2))):
            if offset <= 1:
                break
            cursor.execute('select hash from blocks where mainchain = 1 and height = ?', (offset, ))
            hashes.append(str(cursor.fetchone()[0]))
            offset -= (1 << i)

    # ...finally the genesis hash
    hashes.append(self.coin.genesis_block_hash)

    return hashes
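The shape of the locator — dense near the tip, exponentially sparser behind, ending at genesis — can be sketched without a database, using only a tip height (a minimal illustration, not the method above):

def locator_heights(tip_height):
    '''Block heights for a locator: the 10 newest, then doubling strides back.'''
    heights = []
    step = 1
    h = tip_height
    while h > 0:
        heights.append(h)
        if len(heights) >= 10:
            step *= 2  # double the stride once the 10 newest are included
        h -= step
    heights.append(0)  # genesis
    return heights

print(locator_heights(1000))  # [1000, 999, ..., 991, 989, 985, 977, 961, ...]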
Python
def _TODO_generate_checkpoint_list(self):
    '''Build a list of hashes that make good checkpoints.

    What makes a good checkpoint?
      - all nearby blocks have monotonic timestamps
      - contains no "strange" transactions

    See: https://github.com/bitcoin/bitcoin/blob/master/src/checkpoints.cpp'''

    pass
Python
def twos_comp(val, bits):
    "Compute the two's complement of val, given its width in bits."

    if val & (1 << (bits - 1)):
        val = val - (1 << bits)

    return val
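For example, reinterpreting a few unsigned 8-bit values as signed:

print(twos_comp(0xFF, 8))  # -1
print(twos_comp(0x80, 8))  # -128
print(twos_comp(0x7F, 8))  # 127 (sign bit clear, value unchanged)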
Python
def sigdecode_der(sig_der, order):
    '''We use a slightly more liberal DER decoder, because sometimes
    signatures seem to have trailing 0 bytes. (see block bitcoin@135106)'''

    rs_strings, empty = der.remove_sequence(sig_der)
    #if empty != b("") and empty.strip(chr(0)):
    #    raise der.UnexpectedDER("trailing junk after DER sig: %s" %
    #                            binascii.hexlify(empty))

    r, rest = remove_integer(rs_strings)
    s, empty = remove_integer(rest)
    #if empty != b("") and empty.strip(chr(0)):
    #    raise der.UnexpectedDER("trailing junk after DER numbers: %s" %
    #                            binascii.hexlify(empty))

    #if s < 0:
    #    s = s % order

    return r, s
Python
def combine_private_keys(private_keys):
    'Returns the private key generated by combining two or more private keys.'

    # convert private keys to binary form
    private_keys = [privkey_from_wif(k) for k in private_keys]

    # decode the secret exponents
    secexps = [string_to_number(k) for k in private_keys]

    # add them together, modulo the curve order
    combined = sum(secexps) % curve.order

    # convert into a wif encoded key
    private_key = number_to_string(combined, curve.order)
    return privkey_to_wif(private_key)
Python
def split_private_key(private_key, count = 2):
    '''Splits a private key up into count private keys, all of which are
    required to be combined back into the original key.'''

    # convert and decode private key to secret exponent
    private_key = privkey_from_wif(private_key)
    secexp = string_to_number(private_key)

    # generate random secret exponents, less one
    secexps = [randrange(curve.order) for i in xrange(count - 1)]

    # compute the missing secret exponent that will sum to the given key
    secexp_missing = (secexp - sum(secexps)) % curve.order
    secexps.append(secexp_missing)

    # convert to wif encoded private keys
    private_keys = [number_to_string(s, curve.order) for s in secexps]
    return [privkey_to_wif(k) for k in private_keys]
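split_private_key and combine_private_keys are additive secret sharing modulo the curve order. The same arithmetic with bare integers (a toy modulus stands in for the secp256k1 order) shows the round trip:

from random import randrange

ORDER = 2**61 - 1  # toy stand-in for curve.order

def split(secret, count=2):
    shares = [randrange(ORDER) for _ in range(count - 1)]
    shares.append((secret - sum(shares)) % ORDER)  # shares sum to the secret
    return shares

def combine(shares):
    return sum(shares) % ORDER

secret = 123456789
assert combine(split(secret, 3)) == secret  # round trip recovers the key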
Python
def partial_combine_private_keys(private_keys, ignore_errors = False):
    '''Returns the combined private key from the relevant private keys in
    private_keys, or None if insufficient private keys are provided.

    Unless ignore_errors is True (default is False), any key that does not
    fit with the rest of the keys will raise a ValueError.

    This is EXPERIMENTAL, and may change in the future.'''

    parts = dict()
    required = None
    checksum = None

    # for each key...
    for key in private_keys:

        # ...convert private keys to binary form
        private_key = decode_check(key)
        if not private_key.startswith('\x10\x01'):
            raise ValueError('invalid combined key: %s' % key)

        # ...verify the required number of keys
        r = ord(private_key[3])
        if required is None:
            required = r
        elif required != r:
            if ignore_errors:
                continue
            raise ValueError('key does not match set: %s' % key)

        # ...verify the checksum
        c = private_key[4:8]
        if checksum is None:
            checksum = c
        elif checksum != c:
            if ignore_errors:
                continue
            raise ValueError('key checksum does not match set: %s' % key)

        # ...add this key to the correct key-set
        index = ord(private_key[2])
        if index not in parts:
            parts[index] = set()
        parts[index].add(private_key[8:])

    # find (if any) a complete key-set
    for group in parts.values():
        if len(group) == required:

            # combine the private keys and wif encode the result
            secexp = sum(string_to_number(k) for k in group) % curve.order
            private_key = number_to_string(secexp, curve.order)
            if sha256d(private_key)[:4] != checksum:
                raise ValueError('checksum does not match')
            return privkey_to_wif(private_key)

    return None
Python
def partial_split_qr_encode(private_keys):
    'Encode a partial key-set appropriate for a QR code.'

    required = None
    checksum = None

    binary = []
    for private_key in map(decode_check, private_keys):
        if private_key[0:2] != '\x10\x01':
            raise ValueError('invalid combined key')

        if required is None:
            required = ord(private_key[3])
        elif ord(private_key[3]) != required:
            raise ValueError('unmatched private keys')

        if checksum is None:
            checksum = private_key[4:8]
        elif private_key[4:8] != checksum:
            raise ValueError('unmatched private keys')

        binary.append((ord(private_key[2]), private_key[8:]))
    binary.sort()

    # determine which index (if any) is missing from the set
    missing = len(binary)
    for i in xrange(0, len(binary)):
        if binary[i][0] != i:
            missing = i
            break

    qr = '\x84\x7c\x20' + chr(missing) + chr(required) + checksum
    for private_key in binary:
        qr += private_key[1]

    return base64.b32encode(qr).strip('=')
Python
def partial_split_qr_decode(qr_code):
    'Decode a partial key-set QR code.'

    # calculate padding that would have been stripped
    padding = (len(qr_code) * 5 - 9 * 8) % 32
    binary = base64.b32decode(qr_code + ('=' * padding))

    # check the header
    if not binary.startswith('\x84\x7c\x20'):
        raise ValueError('invalid header')

    # the missing index for this set
    missing = ord(binary[3])
    required = ord(binary[4])
    checksum = binary[5:9]

    # extract each binary key and recompose the key
    keys = set()
    start = 9
    index = 0
    while start + 32 <= len(binary):
        if index == missing:
            index += 1
        key = '\x10\x01' + chr(index) + chr(required) + checksum + binary[start:start + 32]
        keys.add(encode_check(key))
        start += 32
        index += 1

    return keys
Python
def _stack_op(stack, func):
    '''Replaces the top N items from the stack with the items in the list
    returned by the callable func; N is func's argument count.

    func must return a list.

    False is returned on error, otherwise True.'''

    # not enough arguments
    count = len(inspect.getargspec(func).args)
    if len(stack) < count:
        return False
    args = stack[-count:]
    stack[-count:] = []

    # add each returned item onto the stack
    for item in func(*args):
        stack.append(item)

    return True
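With _stack_op in scope (note the snippet's Python 2 era inspect.getargspec; newer Pythons would use inspect.signature), swap- and dup-style opcodes fall out of small lambdas, since the lambda's argument count decides how many items are consumed:

stack = ['a', 'b', 'c']

_stack_op(stack, lambda x1, x2: [x2, x1])  # OP_SWAP: top two, reversed
print(stack)  # ['a', 'c', 'b']

_stack_op(stack, lambda x: [x, x])         # OP_DUP: top item, twice
print(stack)  # ['a', 'c', 'b', 'b']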
Python
def _math_op(stack, func, check_overflow = True):
    '''Replaces the top N items from the stack with the result of the
    callable func; N is func's argument count.

    A boolean result will push either a 0 or 1 on the stack. None will push
    nothing. Otherwise, the result must be a ByteVector.

    False is returned on error, otherwise True.'''

    # not enough arguments
    count = len(inspect.getargspec(func).args)
    if len(stack) < count:
        return False
    args = stack[-count:]
    stack[-count:] = []

    # check for overflow
    if check_overflow:
        for arg in args:
            if len(arg) > 4:
                return False

    # compute the result
    result = func(*args)

    # convert booleans to One or Zero
    if result == True:
        result = One
    elif result == False:
        result = Zero

    if result is not None:
        stack.append(result)

    return True
Python
def _hash_op(stack, func):
    '''Replaces the top of the stack with the result of the callable func.
    The result must be a ByteVector.

    False is returned on error, otherwise True.'''

    # not enough arguments
    if len(stack) < 1:
        return False

    # hash and push
    value = func(stack.pop().vector)
    stack.append(ByteVector(value))

    return True
Python
def match_template(self, template):
    'Given a template, return True if this script matches.'

    if not template[0](self):
        return False

    # ((opcode, bytes, value), template_target)
    for ((o, b, v), t) in zip(self._tokens, template[1:]):

        # callable, check the value
        if callable(t):
            if not t(o, b, v):
                return False

        # otherwise, compare opcode
        elif t != o:
            return False

    return True
Python
def _process(self, script):
    'Parse the script into tokens. Internal use only.'

    while script:
        opcode = ord(script[0])
        bytes = script[0]
        script = script[1:]

        value = None
        verify = False

        if opcode == opcodes.OP_0:
            value = Zero
            opcode = Tokenizer.OP_LITERAL

        elif 1 <= opcode <= 78:
            length = opcode
            if opcodes.OP_PUSHDATA1 <= opcode <= opcodes.OP_PUSHDATA4:
                op_length = [1, 2, 4][opcode - opcodes.OP_PUSHDATA1]
                format = ['<B', '<H', '<I'][opcode - opcodes.OP_PUSHDATA1]
                length = struct.unpack(format, script[:op_length])[0]
                bytes += script[:op_length]
                script = script[op_length:]

            value = ByteVector(vector = script[:length])
            bytes += script[:length]
            script = script[length:]

            if len(value) != length:
                raise Exception('not enough script for literal')
            opcode = Tokenizer.OP_LITERAL

        elif opcode == opcodes.OP_1NEGATE:
            opcode = Tokenizer.OP_LITERAL
            value = ByteVector.from_value(-1)

        elif opcode == opcodes.OP_TRUE:
            opcode = Tokenizer.OP_LITERAL
            value = ByteVector.from_value(1)

        elif opcodes.OP_1 <= opcode <= opcodes.OP_16:
            value = ByteVector.from_value(opcode - opcodes.OP_1 + 1)
            opcode = Tokenizer.OP_LITERAL

        elif self._expand_verify and opcode in self._Verify:
            opcode = self._Verify[opcode]
            verify = True

        self._tokens.append((opcode, bytes, value))

        if verify:
            self._tokens.append((opcodes.OP_VERIFY, '', None))
Python
def verify(self):
    '''Return True if all transaction inputs can be verified against their
    previous output.'''

    for i in xrange(0, len(self._transaction.inputs)):

        # ignore the coinbase (generation transaction input)
        if self._transaction.index == 0 and i == 0:
            continue

        # verify the input with its previous output
        previous_output = self._transaction.previous_output(i)
        if not self.verify_input(i, previous_output.pk_script):
            return False

    return True
Python
def der_encode(r, s):
    """
    DER-encodes an ECDSA signature's (r, s) pair.

    https://bitcoin.stackexchange.com/questions/12554/why-the-signature-is-always-65-13232-bytes-long
    https://github.com/bitcoin/bitcoin/blob/ce74799a3c21355b35fed923106d13a0f8133721/src/script/interpreter.cpp#L108
    """
    r_len = sizeof(r)
    s_len = sizeof(s)
    total_len = 4 + r_len + s_len  # 4 = 02 + r_len + 02 + s_len (one byte each)
    return b'\x30' + total_len.to_bytes(sizeof(total_len), 'big') \
           + b'\x02' + r_len.to_bytes(sizeof(r_len), 'big') + r.to_bytes(sizeof(r), 'big') \
           + b'\x02' + s_len.to_bytes(sizeof(s_len), 'big') + s.to_bytes(sizeof(s), 'big')
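A quick self-contained check; sizeof is an assumed helper returning an integer's width in bytes, matching how the snippet uses it. (Strict DER also requires a leading 0x00 byte when an integer's high bit is set, which this encoder does not add.)

def sizeof(n):
    '''Assumed helper: width of integer n in bytes (minimum 1).'''
    return max(1, (n.bit_length() + 7) // 8)

# with der_encode above in scope:
sig = der_encode(0x11, 0x22)
print(sig.hex())  # 3006020111020122 -> SEQUENCE { INTEGER 0x11, INTEGER 0x22 }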
Python
def send(self, call, params=[]):
    """
    Makes an RPC call to the daemon.

    :param call: The method to send, as a string.
    :param params: The parameters to send with the method.
    :return: The JSON response.
    """
    payload = {'jsonrpc': '1.0', 'id': call, 'method': call, 'params': params}
    r = self.session.post(self.url, json=payload)
    try:
        return r.json()
    except ValueError:
        raise Exception('An error occurred while parsing the daemon response'
                        ' as JSON. Is the node up and synced? ' + r.text)
Python
def parse(self):
    """
    Parses and serializes a script.

    :return: The serialized script, as bytes.
    """
    # parse the string
    instructions = self.script.split(' ')
    serialized = b''
    # fill in the corresponding OPCODEs
    for i in instructions:
        if i in opcodes.OPCODE_NAMES:
            op = opcodes.OPCODE_NAMES.index(i)
            serialized += op.to_bytes(sizeof(op), 'big')
        else:
            # there may be some hex numbers in the script which are not OPCODEs
            try:
                value = int(i, 16)
                length = sizeof(value)
                serialized += length.to_bytes(sizeof(length), 'big') + value.to_bytes(sizeof(value), 'big')
            except ValueError:
                raise Exception('Unexpected instruction in script: {}'.format(i))
    if len(serialized) > 10000:
        raise Exception('Serialized script should be less than 10,000 bytes long')
    return serialized
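A hedged usage sketch: assuming OPCODE_NAMES is ordered by opcode value (so OPCODE_NAMES.index('OP_DUP') == 0x76, as in Bitcoin script) and a hypothetical Script wrapper holding the human-readable text in self.script:

s = Script('OP_DUP ff')  # hypothetical constructor
print(s.parse().hex())   # 7601ff -> OP_DUP, then a 1-byte push of 0xff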
Python
def fetch_script(self, bitcoind):
    """
    Fetches the script which locks this output.

    :param bitcoind: The instance to ask the script from.
    :return: The script, as bytes.
    """
    txid = str(binascii.hexlify(self.txid), 'ascii')
    index = int.from_bytes(self.index, 'little')
    response = bitcoind.send('getrawtransaction', [txid, 1])
    try:
        return binascii.unhexlify(response['result']['vout'][index]['scriptPubKey']['hex'])
    except Exception:
        raise Exception('Error when parsing response from daemon. ' + str(response))
Python
def print(self):
    """
    Displays the decoded transaction in a JSON-like way.

    This method is quite messy. Actually, this function IS messy.
    """
    assert self.serialized is not None
    tx = self.serialized

    print('{')
    print(' version : ', binascii.hexlify(tx[:4]), ',')
    print(' input_count : ', tx[4], ',')
    i = 0
    for input in self.vin:
        print(' input : ')
        print(' prev_hash : ', binascii.hexlify(tx[i+5:i+37]), ',')
        print(' index : ', binascii.hexlify(tx[i+37:i+41]), ',')
        scriptsig_len = tx[i+41]
        print(' scriptsig_len : ', scriptsig_len, ',')
        print(' scriptsig : ', binascii.hexlify(tx[i+42:i+42 + scriptsig_len]), ',')
        print(' sequence', binascii.hexlify(tx[i+42 + scriptsig_len:i + 42 + scriptsig_len + 4]), ',')
        i = i + 42 + scriptsig_len - 1
    i = i + 5
    output_count = tx[i]
    print(' output_count :', output_count, ',')
    j = 0
    while j < output_count:
        print(' output ' + str(j) + ' :')
        print(' value : ', binascii.hexlify(tx[i+1:i+9]), int.from_bytes(tx[i+1:i+9], 'little'), ',')
        script_length = tx[i+9]
        print(' script_length : ', script_length, ',')
        print(' scriptpubkey : ', binascii.hexlify(tx[i+10:i+10+script_length]), ',')
        j += 1
        i = i+9+script_length
    print(' locktime : ', binascii.hexlify(tx[i+1:i+5]), ',')
    print('}')
Python
def create_and_sign(self, privkey, pubkey):
    """
    Creates a raw transaction and signs it.

    :param privkey: The key to sign the tx with.
    :param pubkey: The corresponding public key.
    :return: A serialized and signed Bitcoin transaction.
    """
    self.sign_outputs(privkey, pubkey)
    return self.serialize()
Python
def send(self):
    """
    Sends the transaction to the network.
    """
    # hex() drops the leading zero nibble of the 0x01 version byte, so pad
    # it back to keep a whole number of bytes
    tx = '0' + hex(int.from_bytes(self.serialized, 'big'))[2:]
    response = self.network.send('sendrawtransaction', params=[tx])
    if not response['error']:
        self.id = response['result']
        return True
    else:
        return response['error']
Python
def send_the_hard_way(self, ip):
    """
    Sends a transaction to the network without using RPC, just a raw
    network message.
    https://en.bitcoin.it/wiki/Protocol_documentation

    :param ip: The node to which to send the message. A string.
    """
    # First the version message
    # https://en.bitcoin.it/wiki/Version_Handshake
    magic = 0xddb8c2fd.to_bytes(4, 'little')
    version = int(70003).to_bytes(4, 'little')
    services = int(1).to_bytes(8, 'little')
    timestamp = int(time.time()).to_bytes(8, 'little')
    myip = socket.inet_aton(requests.get('https://api.ipify.org').text)
    nodeip = socket.inet_aton(ip)
    # port 7333 -> insacoin; network addresses are 8 bytes of services plus
    # a 16-byte IPv4-mapped IPv6 address (10 zero bytes, 2 0xff bytes,
    # 4 IPv4 bytes) plus a big-endian port
    addr_recv = services + b'\x00' * 10 + b'\xff\xff' + myip + int(7333).to_bytes(2, 'big')
    addr_from = services + b'\x00' * 10 + b'\xff\xff' + nodeip + int(7333).to_bytes(2, 'big')
    nonce = 0x00.to_bytes(8, 'little')
    user_agent = 0x00.to_bytes(1, 'big')
    start_height = 0x00.to_bytes(4, 'little')
    payload = (version + services + timestamp + addr_recv + addr_from
               + nonce + user_agent + start_height)
    checksum = double_sha256(payload, bin=True)[:4]
    payload_length = len(payload)
    # the command field is 12 bytes of NULL-padded ascii
    version_message = (magic + 'version'.encode('ascii') + b'\x00' * 5
                       + payload_length.to_bytes(4, 'little') + checksum + payload)

    # Now the tx message
    checksum = double_sha256(self.serialized, bin=True)[:4]
    tx_length = len(self.serialized)
    tx_message = (magic + 'tx'.encode('ascii') + b'\x00' * 10
                  + tx_length.to_bytes(4, 'little') + checksum + self.serialized)

    # Now the verack message
    checksum = double_sha256(b'', bin=True)[:4]
    verack_message = (magic + 'verack'.encode('ascii') + b'\x00' * 6
                      + 0x00.to_bytes(4, 'little') + checksum)

    # Now let's connect to the node and send it our messages
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((ip, 7333))
    s.send(version_message)
    s.recv(1000)  # receive version + verack
    s.send(verack_message)
    s.send(tx_message)
    print('Error message : ')
    print(s.recv(1000))
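All three messages above share the same wire framing: 4-byte magic, 12-byte NULL-padded ascii command, 4-byte little-endian payload length, 4-byte checksum (the first bytes of the double-SHA256 of the payload), then the payload. A sketch of a helper that would factor this out (frame_message is not part of the original code; it reuses the module's double_sha256):

def frame_message(magic, command, payload):
    # 12-byte command field, NULL padded to the right
    cmd = command.encode('ascii') + b'\x00' * (12 - len(command))
    checksum = double_sha256(payload, bin=True)[:4]
    return magic + cmd + len(payload).to_bytes(4, 'little') + checksum + payload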
Python
def generate(compressed = True, coin = coins.Bitcoin):
    'Generate a new random address.'
    secexp = randrange(curve.order)
    key = number_to_string(secexp, curve.order)
    if compressed:
        key = key + chr(0x01)
    return Address(private_key = util.key.privkey_to_wif(key), coin = coin)
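A usage sketch (assuming the returned Address exposes the private_key it was constructed with):

addr = generate()                  # compressed Bitcoin key by default
wif = addr.private_key             # WIF-encoded private key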
Python
def decrypt(self, passphrase):
    'Return a decrypted address of this address, using passphrase.'
    # what function do we use to decrypt?
    if self._privkey[1] == chr(0x42):
        decrypt = _decrypt_private_key
    else:
        decrypt = _decrypt_printed_private_key
    return decrypt(self.private_key, passphrase, self.coin)
Python
def _key_from_point(point, compressed):
    'Converts a point into a key.'
    key = (chr(0x04) +
           number_to_string(point.x(), curve.order) +
           number_to_string(point.y(), curve.order))
    if compressed:
        key = util.key.compress_public_key(key)
    return key
Python
def _key_to_point(key):
    'Converts a key to an EC Point.'
    key = util.key.decompress_public_key(key)
    x = string_to_number(key[1:33])
    y = string_to_number(key[33:65])
    return util.ecc.point(x, y)
Python
def _generate_intermediate_code(passphrase, lot = None, sequence = None):
    'Generates a new intermediate code for passphrase.'

    if (lot is None) ^ (sequence is None):
        raise ValueError('must specify both or neither of lot and sequence')
    if lot and not (0 <= lot <= 0xfffff):
        raise ValueError('lot is out of range')
    if sequence and not (0 <= sequence <= 0xfff):
        raise ValueError('sequence is out of range')

    # compute owner salt and entropy
    if lot is None:
        owner_salt = os.urandom(8)
        owner_entropy = owner_salt
    else:
        owner_salt = os.urandom(4)
        lot_sequence = struct.pack('>I', (lot << 12) | sequence)
        owner_entropy = owner_salt + lot_sequence

    prefactor = util.scrypt(_normalize_utf(passphrase), owner_salt, 16384, 8, 8, 32)
    if lot is None:
        pass_factor = string_to_number(prefactor)
    else:
        pass_factor = string_to_number(util.hash.sha256d(prefactor + owner_entropy))

    # compute the public point
    point = curve.generator * pass_factor
    pass_point = _key_from_point(point, True)

    prefix = '\x2c\xe9\xb3\xe1\xff\x39\xe2'
    if lot is None:
        prefix += chr(0x53)
    else:
        prefix += chr(0x51)

    # make a nice human readable string, beginning with "passphrase"
    return util.base58.encode_check(prefix + owner_entropy + pass_point)
Python
def _check_confirmation_code(confirmation_code, passphrase, coin = coins.Bitcoin):
    '''Verifies a confirmation code with passphrase and returns a
       Confirmation object.'''

    payload = util.base58.decode_check(confirmation_code)
    if payload[:5] != '\x64\x3b\xf6\xa8\x9a':
        raise ValueError('invalid confirmation code prefix')

    # de-serialize the payload
    flagbyte = ord(payload[5])
    address_hash = payload[6:10]
    owner_entropy = payload[10:18]
    encrypted_pointb = payload[18:]

    # check for compressed flag
    compressed = False
    if flagbyte & 0x20:
        compressed = True

    # check for a lot and sequence
    lot = None
    sequence = None
    owner_salt = owner_entropy
    if flagbyte & 0x04:
        lot_sequence = struct.unpack('>I', owner_entropy[4:8])[0]
        lot = lot_sequence >> 12
        sequence = lot_sequence & 0xfff
        owner_salt = owner_entropy[:4]

    prefactor = util.scrypt(_normalize_utf(passphrase), owner_salt, 16384, 8, 8, 32)
    if lot is None:
        pass_factor = string_to_number(prefactor)
    else:
        pass_factor = string_to_number(util.hash.sha256d(prefactor + owner_entropy))

    # determine the passpoint
    point = curve.generator * pass_factor
    pass_point = _key_from_point(point, True)

    # derive the key that was used to encrypt the pointb
    salt = address_hash + owner_entropy
    derived_key = util.scrypt(pass_point, salt, 1024, 1, 1, 64)
    (derived_half1, derived_half2) = (derived_key[:32], derived_key[32:])
    aes = AES(derived_half2)

    # decrypt the pointb
    pointb_prefix = ord(encrypted_pointb[0]) ^ (ord(derived_half2[31]) & 0x01)
    pointbx1 = _decrypt_xor(encrypted_pointb[1:17], derived_half1[:16], aes)
    pointbx2 = _decrypt_xor(encrypted_pointb[17:], derived_half1[16:], aes)
    pointb = chr(pointb_prefix) + pointbx1 + pointbx2

    # compute the public key (and address)
    point = _key_to_point(pointb) * pass_factor
    public_key = _key_from_point(point, compressed)
    address = util.key.publickey_to_address(public_key, coin.address_version)

    # verify the checksum
    if util.sha256d(address)[:4] != address_hash:
        raise ValueError('invalid passphrase')

    # wrap it up in a nice object
    self = Confirmation.__new__(Confirmation)
    self._public_key = public_key
    self._address = address
    self._compressed = compressed
    self._lot = lot
    self._sequence = sequence
    self._coin = coin
    return self
Python
def _decrypt_printed_private_key(private_key, passphrase, coin = coins.Bitcoin):
    'Decrypts a printed private key returning an instance of PrintedAddress.'

    payload = util.base58.decode_check(private_key)
    if payload[0:2] != '\x01\x43':
        raise ValueError('invalid printed address private key prefix')
    if len(payload) != 39:
        raise ValueError('invalid printed address private key length')

    # de-serialize the payload
    flagbyte = ord(payload[2])
    address_hash = payload[3:7]
    owner_entropy = payload[7:15]
    encrypted_quarter1 = payload[15:23]
    encrypted_half2 = payload[23:39]

    # check for compressed flag
    compressed = False
    if flagbyte & 0x20:
        compressed = True

    # check for lot and sequence
    (lot, sequence) = (None, None)
    owner_salt = owner_entropy
    if flagbyte & 0x04:
        lot_sequence = struct.unpack('>I', owner_entropy[4:8])[0]
        lot = lot_sequence >> 12
        sequence = lot_sequence & 0xfff
        owner_salt = owner_entropy[0:4]

    prefactor = util.scrypt(_normalize_utf(passphrase), owner_salt, 16384, 8, 8, 32)
    if lot is None:
        pass_factor = string_to_number(prefactor)
    else:
        pass_factor = string_to_number(util.hash.sha256d(prefactor + owner_entropy))

    # compute the public point
    point = curve.generator * pass_factor
    pass_point = _key_from_point(point, True)

    # derive the key that was used to encrypt the seedb; based on the public point
    derived_key = util.scrypt(pass_point, address_hash + owner_entropy, 1024, 1, 1, 64)
    (derived_half1, derived_half2) = (derived_key[:32], derived_key[32:])
    aes = AES(derived_half2)

    # decrypt the seedb (it was nested, so we work backward)
    decrypted_half2 = _decrypt_xor(encrypted_half2, derived_half1[16:], aes)
    encrypted_half1 = encrypted_quarter1 + decrypted_half2[:8]
    decrypted_half1 = _decrypt_xor(encrypted_half1, derived_half1[:16], aes)

    # compute the seedb
    seedb = decrypted_half1 + decrypted_half2[8:16]
    factorb = string_to_number(util.sha256d(seedb))

    # compute the secret exponent
    secexp = (factorb * pass_factor) % curve.order

    # convert it to a private key
    private_key = number_to_string(secexp, curve.order)
    if compressed:
        private_key += chr(0x01)

    # wrap it up in a nice object
    self = PrintedAddress.__new__(PrintedAddress)
    Address.__init__(self, private_key = util.key.privkey_to_wif(private_key), coin = coin)
    self._lot = lot
    self._sequence = sequence

    # verify the checksum
    if address_hash != util.sha256d(self.address)[:4]:
        raise ValueError('incorrect passphrase')

    return self
Python
def disconnected(self, peer):
    'Called by a peer after it has been closed.'
    BaseNode.disconnected(self, peer)
    if peer in self._inflight_blocks:
        del self._inflight_blocks[peer]
Python
def pbkdf2(password, salt, count, key_length, prf):
    '''Returns the result of the Password-Based Key Derivation Function 2.

       prf - a pseudorandom function

       See http://en.wikipedia.org/wiki/PBKDF2
    '''

    def f(block_number):
        '''The function "f".'''
        U = prf(password, salt + struct.pack('>L', block_number))
        if count > 1:
            T = [ c for c in U ]
            for i in xrange(2, 1 + count):
                # chain the prf on the previous U and xor the result into the accumulator
                U = prf(password, U)
                block_xor(U, 0, T, 0, len(T))
            U = ''.join(T)
        return U

    size = 0
    block_number = 0
    blocks = [ ]

    # the iterations
    while size < key_length:
        block_number += 1
        block = f(block_number)
        blocks.append(block)
        size += len(block)

    return ''.join(blocks)[:key_length]
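A usage sketch: the prf is typically an HMAC construction, and scrypt itself only ever calls PBKDF2 with count = 1 (this module is Python 2, so keys and output are byte strings):

import hashlib
import hmac

prf = lambda key, msg: hmac.new(key, msg, hashlib.sha256).digest()
derived = pbkdf2('password', 'salt', 1, 64, prf)   # 64 bytes of key material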
Python
def relay(self, message, peer):
    "Relay a message for a peer, provided it has not reached its quota."

    # relaying will not be implemented until checkpoints are implemented
    return

    # quota reached for this peer
    if self._relay_count.get(peer, 0) > MAX_RELAY_COUNT:
        return

    # track this relay request
    if peer not in self._relay_count:
        self._relay_count[peer] = 0
    self._relay_count[peer] += 1

    # relay to every peer except the sender
    for n in self.peers:
        if n == peer:
            continue
        n.send(message)
Python
def _decay_relay(self):
    'Apply aging policy for throttling relaying per peer.'

    # relaying will not be implemented until checkpoints are implemented
    return

    dt = time.time() - self._last_relay_decay
    for peer in list(self._relay_count):
        count = self._relay_count[peer]
        count -= dt * RELAY_COUNT_DECAY
        if count <= 0.0:
            del self._relay_count[peer]
        else:
            self._relay_count[peer] = count

    self._last_relay_decay = time.time()
Python
def serve_forever(self):
    'Block and begin accepting connections.'
    try:
        asyncore.loop(5, map = self)
    except StopNode:
        pass
    finally:
        self.handle_close()
Python
def add_peer(self, address, force = True):
    '''Connect to a new peer.

       If force is False, and we already have max_peers, then the peer
       is not connected.'''

    # already have enough peers
    if not force and len(self) >= self._max_peers:
        return False

    # already a peer
    if address in [n.address for n in self.values()]:
        return False

    try:
        # asyncore keeps a reference in the map (ie. node = self)
        connection.Connection(address = address, node = self)
    except Exception, e:
        self.log(str(e))
        return False

    return True
Python
def invalid_alert(self, peer, alert):
    '''Called by a peer when an alert is invalid.

       An alert is invalid if the signature is incorrect, the alert has
       expired or the alert does not apply to this protocol version.'''

    self.log("Ignored alert: %s" % alert)
Python
def disconnected(self, peer):
    'Called by a peer after it has been closed.'
    if peer.address in self._addresses:
        del self._addresses[peer.address]
Python
def _check_external_ip_address(self):
    '''We rely on the peers we have connected to to tell us our IP address.

       Until we have connected peers though, this means we tell a white
       lie; we give our bound network ip address (usually 127.0.0.1).

       We ask all our peers what our address is, and take the majority
       answer. Even if we are lied to by dishonest/insane peers, the
       only problem is that we lie a bit more to our peers, which they
       likely ignore anyway, since they can determine it themselves more
       accurately from the tcp packets (like we do in handle_accept).'''

    tally = dict()
    for peer in self.peers:
        address = peer.external_ip_address
        if address is None:
            continue
        if address not in tally:
            tally[address] = 0
        tally[address] += 1

    if tally:
        tally = [(tally[a], a) for a in tally]
        tally.sort()
        self._guessed_external_ip_address = tally[-1][1]
Python
def heartbeat(self):
    'Called about every 10 seconds to perform maintenance tasks.'

    peers = self.peers

    # if we need more peer connections, attempt to add some (up to 5 at a time)
    for i in xrange(0, min(self._seek_peers - len(peers), 5)):
        self.add_any_peer()

    # if we don't have many addresses ask any peer for some more
    if peers and len(self._addresses) < 50:
        peer = random.choice(peers)
        peer.send_message(protocol.GetAddress())

    # Give a little back to peers that were bad but seem to be good now
    for peer in peers:
        peer.reduce_banscore()

    # Give all the peers a little more room for relaying
    self._decay_relay()
Python
def handle_accept(self):
    'Incoming connection; connect it if we have available connections.'

    pair = self.accept()
    if not pair:
        return
    (sock, address) = pair

    # we banned this address less than an hour ago
    if address in self._banned:
        if time.time() - self._banned[address] < 3600:
            sock.close()
            return
        del self._banned[address]

    # we are not accepting incoming connections; drop it
    if not self._listen:
        sock.close()
        return

    # asyncore keeps a reference to us in the map (ie. node = self)
    connection.Connection(node = self, sock = sock, address = address)
Python
def b58encode(v):
    "encode v, which is a string of bytes, to base58."

    long_value = 0L
    for (i, c) in enumerate(v[::-1]):
        long_value += (256 ** i) * ord(c)

    result = ''
    while long_value >= __b58base:
        (div, mod) = divmod(long_value, __b58base)
        result = __b58chars[mod] + result
        long_value = div
    result = __b58chars[long_value] + result

    # Bitcoin does a little leading-zero-compression:
    # leading 0-bytes in the input become leading-1s
    nPad = 0
    for c in v:
        if c == '\0':
            nPad += 1
        else:
            break

    return (__b58chars[0] * nPad) + result
Python
def b58decode(v, length = None):
    "decode v into a string of length bytes"

    long_value = 0L
    for (i, c) in enumerate(v[::-1]):
        long_value += __b58chars.find(c) * (__b58base ** i)

    result = ''
    while long_value >= 256:
        (div, mod) = divmod(long_value, 256)
        result = chr(mod) + result
        long_value = div
    result = chr(long_value) + result

    # restore leading zero bytes for leading base58 1-characters
    nPad = 0
    for c in v:
        if c == __b58chars[0]:
            nPad += 1
        else:
            break
    result = chr(0) * nPad + result

    if length is not None and len(result) != length:
        return None

    return result
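A round-trip sketch of the pair above; leading zero bytes survive as leading '1' characters:

raw = '\x00\x01\x02'
encoded = b58encode(raw)                 # '1' + base58 of 0x0102
assert b58decode(encoded, length = 3) == raw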
Python
def encode_check(payload):
    'Returns the base58 encoding with a 4-byte checksum.'
    checksum = sha256d(payload)[:4]
    return b58encode(payload + checksum)
Python
def decode_check(payload):
    'Returns the base58 decoded value, verifying the checksum.'
    payload = b58decode(payload, None)
    if payload and sha256d(payload[:-4])[:4] == payload[-4:]:
        return payload[:-4]
    return None
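And a round-trip sketch of the checksum wrappers; decode_check returns None if the trailing checksum does not match:

payload = '\x00somedata'
token = encode_check(payload)
assert decode_check(token) == payload    # checksum verified and stripped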
Python
def _load_get_attr(self, name):
    'Return an internal attribute after ensuring the header is loaded if necessary.'
    if self._mode == MODE_READ and self._N is None:
        self._read_header()
    return getattr(self, name)
Python
def fileno(self):
    '''Integer "file descriptor" for the underlying file object.

       This is needed for lower-level file interfaces, such as os.read().'''
    return self._fp.fileno()
Python
def close(self):
    '''Close the underlying file.

       Sets data attribute .closed to True. A closed file cannot be used
       for further I/O operations. close() may be called more than once
       without error.

       Some kinds of file objects (for example, opened by popen()) may
       return an exit status upon closing.'''

    if self._mode == MODE_WRITE and self._valid is None:
        self._finalize_write()

    result = self._fp.close()
    self._closed = True
    return result
Python
def finalize(self):
    '''Write the final checksum without closing the file.

       This may be required if the underlying file object cannot be
       closed in a meaningful way (for example: StringIO will release
       underlying memory).'''

    if self._mode == MODE_WRITE and self._valid is None:
        self._finalize_write()
Python
def verify_file(fp, password):
    'Returns whether a scrypt encrypted file is valid.'
    sf = ScryptFile(fp = fp, password = password)
    for line in sf:
        pass
    sf.close()
    return sf.valid
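A decryption sketch using the same interface (the file name and password are placeholders):

fp = open('backup.scrypt', 'rb')
sf = ScryptFile(fp = fp, password = 'secret')
plaintext = sf.read()                    # decrypts and checks the stream checksum
sf.close()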
Python
def readline(self, size = None):
    '''Next line from the decrypted file, as a string.

       Retain newline. A non-negative size argument limits the maximum
       number of bytes to return (an incomplete line may be returned
       then). Return an empty string at EOF.'''

    if self.closed:
        raise ValueError('file closed')
    if self._mode == MODE_WRITE:
        raise Exception('file opened for write only')

    if self._read_finished:
        return ''

    line = ''
    while not line.endswith('\n') and not self._read_finished and (size is None or len(line) < size):
        line += self.read(1)
    return line
Python
def readlines(self, sizehint = None):
    '''List of strings, each a decrypted line from the file.

       Call readline() repeatedly and return a list of the lines so read.
       The optional sizehint argument, if given, is an approximate bound
       on the total number of bytes in the lines returned.'''
    return list(self)
Python
def _read_header(self):
    '''Read and parse the header and calculate derived keys.'''

    try:
        # Read the entire header
        header = self._fp.read(96)
        if len(header) != 96:
            raise InvalidScryptFileFormat('Incomplete header')

        # Magic number
        if header[0:6] != 'scrypt':
            raise InvalidScryptFileFormat('Invalid magic number')

        # Version (we only support 0)
        version = ord(header[6])
        if version != 0:
            raise InvalidScryptFileFormat('Unsupported version (%d)' % version)

        # Scrypt parameters
        self._N = 1 << ord(header[7])
        (self._r, self._p) = struct.unpack('>II', header[8:16])
        self._salt = header[16:48]

        # Generate the key
        self._key = hash(self._password, self._salt, self._N, self._r, self._p, 64)

        # Header checksum
        checksum = header[48:64]
        calculated_checksum = hashlib.sha256(header[0:48]).digest()[:16]
        if checksum != calculated_checksum:
            raise InvalidScryptFileFormat('Incorrect header checksum')

        # Stream checksum
        checksum = header[64:96]
        self._checksumer = hmac.new(self.key[32:], msg = header[0:64], digestmod = hashlib.sha256)
        if checksum != self._checksumer.digest():
            raise InvalidScryptFileFormat('Incorrect header stream checksum')
        self._checksumer.update(header[64:96])

        # Prepare the AES engine
        counter = aesctr.Counter(nbits = 128, initial_value = 0)
        self._crypto = aesctr.AESCounterModeOfOperation(key = self.key[:32], counter = counter)

        self._done_header = True

    except InvalidScryptFileFormat, e:
        self.close()
        raise e

    except Exception, e:
        self.close()
        raise InvalidScryptFileFormat('Header error (%s)' % e)
Python
def read(self, size = None):
    '''Read at most size bytes, returned as a string.

       If the size argument is negative or omitted, read until EOF is
       reached. Notice that when in non-blocking mode, less data than
       what was requested may be returned, even if no size parameter
       was given.'''

    if self.closed:
        raise ValueError('File closed')
    if self._mode == MODE_WRITE:
        raise Exception('File opened for write only')

    if not self._done_header:
        self._read_header()

    # The encrypted file has been entirely read, so return as much as they want
    # and remove the returned portion from the decrypted buffer
    if self._read_finished:
        if size is None:
            decrypted = self._decrypted_buffer
        else:
            decrypted = self._decrypted_buffer[:size]
        self._decrypted_buffer = self._decrypted_buffer[len(decrypted):]
        return decrypted

    # Read everything in one chunk
    if size is None or size < 0:
        self._encrypted_buffer = self._fp.read()
        self._read_finished = True

    else:
        # We fill the encrypted buffer (keeping it with a minimum of 32 bytes in case of the
        # end-of-file checksum) and decrypt into a decrypted buffer 1 block at a time
        while not self._read_finished:

            # We have enough decrypted bytes (or will after decrypting the encrypted buffer)
            available = len(self._decrypted_buffer) + len(self._encrypted_buffer) - 32
            if available >= size:
                break

            # Read a little extra for the possible final checksum
            data = self._fp.read(BLOCK_SIZE)

            # No data left; we're done
            if not data:
                self._read_finished = True
                break

            self._encrypted_buffer += data

    # Decrypt as much of the encrypted data as possible (leaving the final checksum)
    safe = self._encrypted_buffer[:-32]
    self._encrypted_buffer = self._encrypted_buffer[-32:]
    self._decrypted_buffer += self._crypto.decrypt(safe)
    self._checksumer.update(safe)

    # We read all the bytes, only the checksum remains
    if self._read_finished:
        self._check_final_checksum(self._encrypted_buffer)

    # Send back the number of bytes requested and remove them from the buffer
    if size is None or size < 0:
        decrypted = self._decrypted_buffer
        self._decrypted_buffer = ''
    else:
        decrypted = self._decrypted_buffer[:size]
        self._decrypted_buffer = self._decrypted_buffer[size:]
    return decrypted
Python
def flush(self):
    "Flush the underlying file object's I/O buffer."
    if self._mode == MODE_WRITE:
        self._fp.flush()
Python
def writelines(self, sequence):
    '''Write the strings to the underlying file object.

       Note that newlines are not added. The sequence can be any iterable
       object producing strings. This is equivalent to calling write() for
       each string.'''
    self.write(''.join(sequence))
Python
def _write_header(self):
    'Writes the header to the underlying file object.'

    header = ('scrypt' + chr(0) +
              struct.pack('>BII', int(round(math.log(self.N, 2))), self.r, self.p) +
              self.salt)

    # Add the header checksum to the header
    checksum = hashlib.sha256(header).digest()[:16]
    header += checksum

    # Add the header stream checksum
    self._checksumer = hmac.new(self.key[32:], msg = header, digestmod = hashlib.sha256)
    checksum = self._checksumer.digest()
    header += checksum
    self._checksumer.update(checksum)

    # Write the header
    self._fp.write(header)

    # Prepare the AES engine
    counter = aesctr.Counter(nbits = 128, initial_value = 0)
    self._crypto = aesctr.AESCounterModeOfOperation(key = self.key[:32], counter = counter)

    self._done_header = True
Python
def _finalize_write(self):
    'Finishes any unencrypted bytes and writes the final checksum.'

    # Make sure we have written the header
    if not self._done_header:
        self._write_header()

    # Write the remaining decrypted part to disk
    block = self._crypto.encrypt(self._decrypted_buffer)
    self._decrypted_buffer = ''
    self._fp.write(block)
    self._checksumer.update(block)

    # Write the final checksum
    self._fp.write(self._checksumer.digest())

    self._valid = True
Python
def write(self, str):
    '''Write string str to the underlying file.

       Note that due to buffering, flush() or close() may be needed before
       the file on disk reflects the data written.'''

    if self.closed:
        raise ValueError('File closed')
    if self._mode == MODE_READ:
        raise Exception('File opened for read only')
    if self._valid is not None:
        raise Exception('file already finalized')

    if not self._done_header:
        self._write_header()

    # Encrypt and write the data
    encrypted = self._crypto.encrypt(str)
    self._checksumer.update(encrypted)
    self._fp.write(encrypted)
Python
def array_overwrite(source, source_start, dest, dest_start, length):
    '''Overwrites the dest array with the source array.'''
    for i in xrange(0, length):
        dest[dest_start + i] = source[source_start + i]
Python
def block_xor(source, source_start, dest, dest_start, length):
    '''Performs xor on arrays source and dest, storing the result back in dest.'''
    for i in xrange(0, length):
        dest[dest_start + i] = chr(ord(dest[dest_start + i]) ^ ord(source[source_start + i]))
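Both helpers mutate dest in place, so dest must be a mutable sequence of single-character strings; a quick sketch:

buf = [c for c in 'abcd']
block_xor('\x20\x20\x20\x20', 0, buf, 0, 4)   # xor with 0x20 flips ascii case
assert ''.join(buf) == 'ABCD'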
Python
def verify(transaction):
    "Verify a transaction's inputs and outputs."

    try:
        # do the inputs afford the outputs? (coinbase is an exception)
        fees = 0
        if transaction.index != 0:
            sum_in = sum(po_value(transaction, i) for i in xrange(0, len(transaction.inputs)))
            sum_out = sum(o.value for o in transaction.outputs)
            fees = sum_in - sum_out
            if fees < 0:
                print sum_in, sum_out
                print "FAIL(sum_in < sum_out)", transaction
                return (False, [], 0)

        # are all inputs valid against their previous output?
        txio = script.Script(transaction)
        valid = txio.verify()
        addresses = [txio.output_address(o) for o in xrange(0, txio.output_count)]
        if not valid:
            print transaction

        return (valid, addresses, fees)

    except Exception, e:
        print transaction, e
        import traceback
        traceback.print_exc()
        raise e
Python
def _todo_rollback(self, block):
    'Undo all unspent transactions for a block. Must be the latest valid block.'

    # this would break our data model (but shouldn't be possible anyway)
    if block._blockid <= 1:
        raise ValueError('cannot remove pre-genesis block')

    # get the base uock for this block (ie. txn_index == output_index == 0)
    txck = keys.get_txck(block._blockid, 0)
    uock = keys.get_uock(txck, 0)

    # begin a transaction, locking out other updates
    cursor = self._connection.cursor()
    cursor.execute('begin immediate transaction')

    # make sure we are removing a block we have already added
    last_valid_block = self.get_metadata(cursor, KEY_LAST_VALID_BLOCK)
    if last_valid_block != block._blockid:
        raise ValueError('can only rollback last valid block')

    # remove all outputs
    raise NotImplementedError()

    # re-add all inputs' outputs
    raise NotImplementedError()

    # the most recent block is now the previous block
    self.set_metadata(cursor, KEY_LAST_VALID_BLOCK, block._previous_blockid)

    self._connection.commit()
Python
def update(self, block):
    '''Updates the unspent transaction output (utxo) database with the
       transactions from a block.'''

    t0 = time.time()

    txns = block.transactions
    for txn in txns:
        txn.cache_previous_outputs()

    t1 = time.time()

    if self._pool is None:
        results = map(verify, txns)
    else:
        results = self._pool.map(verify, txns)

    # make sure the coinbase's output doesn't exceed its permitted fees
    fees = sum(r[2] for r in results)
    fees += self.coin.block_creation_fee(block)
    sum_out = sum(o.value for o in txns[0].outputs)
    if fees < sum_out:
        raise InvalidTransactionException('invalid coinbase fee')

    t2 = time.time()

    # update the database
    self._update(block, [(t, r[0], r[1]) for (t, r) in zip(txns, results)])

    t3 = time.time()

    print "Processed %d transactions (cache=%fs, compute=%fs, update=%fs)" % (len(txns), t1 - t0, t2 - t1, t3 - t2)
Python
def parse_variable_set(data, kind):
    '''Reads a set of Parsable objects prefixed with a VarInteger.

       Any object can be used that supports parse(data), which returns
       a tuple of (bytes_consumed, value).'''

    (offset, count) = FormatTypeVarInteger.parse(data)

    result = [ ]
    index = 0
    while index < count:
        (item_length, item_obj) = kind.parse(data[offset:])
        result.append(item_obj)
        index += 1
        offset += item_length

    return (offset, result)
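Any kind exposing parse(data) -> (bytes_consumed, value) works; a hypothetical single-byte type for illustration (Uint8 is not part of the original code):

class Uint8(object):
    @staticmethod
    def parse(data):
        # consume one byte, return it as an integer
        return (1, ord(data[0]))

# '\x02' is the VarInteger count; two one-byte items follow
(consumed, items) = parse_variable_set('\x02\x0a\x0b', Uint8)
assert consumed == 3 and items == [10, 11]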
Python
def validate(self, obj):
    '''Returns the object to store if obj is valid for this type,
       otherwise None. The type returned should be immutable.'''
    raise NotImplementedError()
Python
def parse(self, data):
    '''Returns a (length, value) tuple where length is the amount of
       data that was consumed.'''
    raise NotImplementedError()
Python
def copy_model(name, model_to_copy, db_table, options=None):
    """
    Deep copy a model's fields and database attributes, so that we don't
    modify the original model's table.
    """
    copy_meta = getattr(model_to_copy, '_meta')
    fields = copy_meta.fields
    app_label = copy_meta.app_label
    module = model_to_copy.__module__

    class Meta:
        # Using type('Meta', ...) gives a dictproxy error during model creation
        pass

    # app_label must be set using the Meta inner class
    if app_label:
        setattr(Meta, 'app_label', app_label)

    # Update Meta with any options that were provided
    if options is not None:
        for key, value in options.items():
            setattr(Meta, key, value)

    # Set up a dictionary to simulate declarations within a class;
    # creating the class automatically triggers ModelBase processing
    attrs = dict()
    attrs['__module__'] = module
    attrs['Meta'] = Meta

    # Prepare to copy all fields from the existing model.
    if fields:
        names = []
        types = []
        for item in copy_meta.concrete_fields:
            names.append(item.name)
            types.append(item)
        field_dict = dict(zip(names, types))
        attrs.update(field_dict)

    model = type(name, (models.Model,), attrs)

    # Remove from the model registry immediately so it doesn't complain
    # about us changing the model.
    ModelRegistry(app_label).unregister_model(name)

    new_meta = getattr(model, '_meta')
    for f in new_meta.concrete_fields:
        f.cached_col = Col(db_table, f)

    return model
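A hedged usage sketch (MyModel and the archive table name are placeholders; pointing the copy at the table through both the Meta options and the cached columns):

ArchiveModel = copy_model(
    'ArchiveModel',
    MyModel,                                   # hypothetical existing model
    'myapp_mymodel_archive',                   # table the copy should query
    options={'db_table': 'myapp_mymodel_archive', 'managed': False},
)
rows = ArchiveModel.objects.all()              # reads from the archive table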
Python
def matrix(self, matrix):
    """Set the inner matrix to a different layout.

    Assumes the user will respect the format returned by the string
    representation: dash '-' for the frame, 'x' for lines and any other
    character for filling."""
    new_height = len(matrix) - 2
    new_width = len(matrix[0]) - 2
    if new_height < 0 or new_width < 0:
        raise ValueError("invalid matrix: {0}".format(matrix))
    self._matrix = matrix
    self._height = new_height
    self._width = new_width
Python
def draw_line(self, x1, y1, x2, y2):
    """Given two points in the space, draws a line from p1 to p2.
    It can only draw vertical and horizontal lines."""
    if x1 == x2:
        self.draw_vertical_line(x1, y1, y2)
    elif y1 == y2:
        self.draw_horizontal_line(y1, x1, x2)
    else:
        print("Only vertical and horizontal lines supported")
Python
def draw_vertical_line(self, x, y1, y2):
    """Given the x position, draws a line of length |y2 - y1|."""
    (origin, end) = (min(y1, y2), max(y1, y2))
    if not (self.in_range(x, y1) and self.in_range(x, y2)):
        print("Line out of range")
    else:
        for i in range(origin, end + 1):
            self.matrix[i][x] = "x"
Python
def draw_horizontal_line(self, y, x1, x2):
    """Given the y position, draws a line of length |x2 - x1|."""
    (origin, end) = (min(x1, x2), max(x1, x2))
    if not (self.in_range(x1, y) and self.in_range(x2, y)):
        print("Line out of range")
    else:
        for i in range(origin, end + 1):
            self.matrix[y][i] = "x"
Python
def draw_rectangle(self, x1, y1, x2, y2):
    """Given two points in the space, draws a rectangle parallel to the
    x and y axes, using the points as the upper-left and lower-right
    corners."""
    (min_x, max_x, min_y, max_y) = (min(x1, x2), max(x1, x2),
                                    min(y1, y2), max(y1, y2))
    if not (self.in_range(min_x, min_y) and self.in_range(max_x, max_y)):
        print("Rectangle out of range")
    else:
        self.draw_horizontal_line(min_y, min_x, max_x)
        self.draw_horizontal_line(max_y, min_x, max_x)
        self.draw_vertical_line(min_x, min_y, max_y)
        self.draw_vertical_line(max_x, min_y, max_y)
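A hypothetical session with these helpers, assuming a Canvas(width, height) constructor (suggested by the Canvas(0, 0) call in from_layout below) and a __str__ that renders the framed matrix; both are assumptions, since neither is shown:

c = Canvas(20, 4)
c.draw_line(1, 2, 6, 2)          # horizontal: y1 == y2
c.draw_line(10, 1, 10, 3)        # vertical: x1 == x2
c.draw_rectangle(14, 0, 18, 3)   # axis-aligned box from two corners
print(c)                         # assumed __str__ rendering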
def fill(self, x, y, colour):
    """Given a point in space and a colour, fill the space spreading
    from the point vertically and horizontally, stopping at borders (x)
    and frames (| or -)."""
    if not self.in_range(x, y):
        print("Fill origin out of range")
    else:
        visited = set()
        queue = deque([(x, y)])
        while queue:
            (x0, y0) = queue.popleft()
            visited.add((x0, y0))
            self.matrix[y0][x0] = colour
            for e in [-1, 1]:
                advance_y = (x0, y0 + e)
                advance_x = (x0 + e, y0)
                for point in [advance_x, advance_y]:
                    if (self.in_range_point(point)
                            and point not in visited
                            and self.matrix[point[1]][point[0]] != "x"):
                        queue.append(point)
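fill is a plain breadth-first flood fill; it needs "from collections import deque" at module level, which the snippet omits. Note that a cell can be enqueued more than once before it is popped, since it only enters visited on dequeue; that is harmless here because recolouring is idempotent, though marking cells visited at enqueue time would avoid the re-queues. Continuing the hypothetical session above:

c.fill(3, 1, "o")   # colours the open region containing (3, 1),
                    # stopping at the 'x' cells drawn earlier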
def from_layout(layout):
    """Returns a canvas built from a matrix layout."""
    matrix = [list(line) for line in layout.split("\n")]
    new_canvas = Canvas(0, 0)
    new_canvas.matrix = matrix
    return new_canvas
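The missing self suggests this is a @staticmethod on Canvas (an assumption; the decorator is not shown). Assigning to new_canvas.matrix goes through the setter above, which recomputes the height and width. A hypothetical round trip:

layout = "-----\n-   -\n-----"
c = Canvas.from_layout(layout)   # assumes from_layout is a Canvas staticmethod
assert c.matrix[1][1] == " "     # inner cell of the 3-line framed layout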
def step(self, time_passed: float) -> Tuple[Vector2, Vector2]:
    """Calculates the change in velocity and movement of this body.

    Use seconds for time_passed.

    Returns
    -------
    Change in velocity (Vector2; None if this body does not have an acceptor)
    Movement (change in position) (Vector2)
    """
    res = None
    if self.acceptor is not None:
        res = self.acceptor.calculate(time_passed)
    return res, self.physics.calculate(time_passed)
def ensure_metadata(body: PotentialBody, *defaults) -> BodyWithMetadata:
    """Ensures that type(body) == BodyWithMetadata.

    Specify additional arguments to set the default metadata if the body
    is just a plain Body."""
    if isinstance(body, BodyWithMetadata):
        return body
    return BodyWithMetadata(body, *defaults)
def iter_ensure_metadata(it: Iterable[PotentialBody],
                         name_format: str = 'body%03i',
                         *defaults) -> Generator[BodyWithMetadata, None, None]:
    """Generator over it that calls BodyWithMetadata.ensure_metadata.

    name_format is a percent format string that indicates the default
    name for a body; this replaces the first argument for defaults."""
    # typing.Generator needs its send/return type parameters as well;
    # the bare Generator[BodyWithMetadata] annotation was invalid.
    yield from (
        BodyWithMetadata.ensure_metadata(body, name_format % i, *defaults)
        for (i, body) in enumerate(it)
    )
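A hypothetical call, assuming Body() takes no required arguments and that the first metadata slot is the body's name (neither is shown in the original):

bodies = [Body(), BodyWithMetadata(Body(), "moon")]
named = list(BodyWithMetadata.iter_ensure_metadata(bodies))
# the first body gets the generated name "body000";
# the second already carries metadata and is passed through unchanged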
def form_data(self):
    """Provide a dictionary of valid form data."""
    return {
        'name': 'Cat Crawl',
        'description': 'Find all the cats.',
        'crawler': 'ache',
        'seeds_list': self.get_seeds(),
        'crawl_model': self.test_crawlmodel.pk,
    }
def slugs(self):
    """Return a dictionary with a "test" project slug."""
    return dict(slugs=dict(project_slug="test"))
def run(self):
    """Run the crawl.

    Note
    ----
    This method should be called via super() from a subclass.

    Operation
    ---------
    First, remove the stop file if it exists. This is convenient while
    developing; however, it should (TODO) raise an error if the stop file
    exists for a crawl that has not yet run.

    Second, open a subprocess based on `self.call` (defined by a
    subclass), direct its output to an appropriate log file, and update
    the status.

    Then, as long as the process is running, every five seconds:

    1. Log statistics
    2. Check if a stop file has appeared, and exit the process accordingly

    When the process has ended--either naturally or after a stop was
    requested--the crawl status is updated.
    """
    rm_if_exists(self.stop_file)
    with open(join(self.crawl_dir, 'crawl_proc.log'), 'a') as stdout:
        self.proc = subprocess.Popen(self.call,
                                     stdout=stdout,
                                     stderr=subprocess.STDOUT,
                                     preexec_fn=os.setsid)
    self.crawl.status = "running"
    self.crawl.save()
    stopped_by_user = False
    while self.proc.poll() is None:
        self.log_statistics()
        if rm_if_exists(self.stop_file):
            stopped_by_user = True
            break
        sys.stdout.write(".")
        sys.stdout.flush()
        time.sleep(5)
    if stopped_by_user:
        os.killpg(self.proc.pid, signal.SIGTERM)
    self.crawl.status = "stopped"
    self.crawl.save()
    return True
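The method leans on a small rm_if_exists helper that is not shown. A plausible sketch, with its contract inferred from the call sites (delete the file if present and report whether it was there):

import os

def rm_if_exists(path):
    """Remove path if it exists; return True if it was there (assumed contract)."""
    try:
        os.remove(path)
        return True
    except OSError:
        return False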
def onsuppressions(unit, *args):
    """
    SUPPRESSIONS() - allows specifying files with suppression notation
    which will be used by the address, leak or thread sanitizer runtime
    by default.
    Use the asan.supp filename for address sanitizer, lsan.supp for leak
    sanitizer and tsan.supp for thread sanitizer suppressions respectively.
    See https://clang.llvm.org/docs/AddressSanitizer.html#suppressing-memory-leaks
    for details.
    """
    import os

    valid = ("asan.supp", "tsan.supp", "lsan.supp")
    if unit.get("SANITIZER_TYPE") in ("leak", "address", "thread"):
        for x in args:
            if os.path.basename(x) not in valid:
                unit.message(['error',
                              "Invalid suppression filename: {} (any of the following is expected: {})".format(x, valid)])
                return
        unit.onsrcs(["GLOBAL"] + list(args))
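As a build-plugin handler, this would be driven by the SUPPRESSIONS macro in a project's build file; a hypothetical invocation:

# ya.make (hypothetical project file)
SUPPRESSIONS(asan.supp)   # only forwarded to sources when SANITIZER_TYPE is set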
def _cephes_vs_amos_points(self):
    """Yield points at which to compare Cephes implementation to AMOS"""
    # check several points, including large-amplitude ones
    for v in [-120, -100.3, -20., -10., -1., -.5, 0., 1., 12.49, 120., 301]:
        for z in [-1300, -11, -10, -1, 1., 10., 200.5, 401., 600.5,
                  700.6, 1300, 10003]:
            yield v, z

    # check half-integers; these are problematic points at least
    # for cephes/iv
    for v in 0.5 + arange(-60, 60):
        yield v, 3.5
def _series(self, v, z, n=100):
    """Compute Struve function & error estimate from its power series."""
    k = arange(0, n)
    r = (-1)**k * (.5*z)**(2*k+v+1) / special.gamma(k+1.5) / special.gamma(k+v+1.5)
    err = abs(r).max() * finfo(float_).eps * n
    return r.sum(), err
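For reference, the terms summed above are exactly the power-series expansion of the Struve function, with the error estimate taken as the largest term scaled by machine epsilon and the term count:

H_\nu(z) = \sum_{k=0}^{\infty} \frac{(-1)^k \, (z/2)^{2k+\nu+1}}{\Gamma\left(k+\tfrac{3}{2}\right)\,\Gamma\left(k+\nu+\tfrac{3}{2}\right)}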
def upcast_scalar(dtype, scalar):
    """Determine data type for binary operation between an array of
    type `dtype` and a scalar."""
    return (np.array([0], dtype=dtype) * scalar).dtype
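The trick is to let NumPy's own promotion rules decide, by multiplying a one-element array by the scalar. A sketch of the expected results (exact outcomes follow NumPy's scalar-promotion rules):

upcast_scalar(np.int32, 2)     # -> dtype('int32'): a Python int keeps the array dtype
upcast_scalar(np.int32, 2.5)   # -> dtype('float64'): a float forces upcasting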
def downcast_intp_index(arr):
    """
    Down-cast index array to np.intp dtype if it is of a larger dtype.

    Raise an error if the array contains a value that is too large for
    intp.
    """
    if arr.dtype.itemsize > np.dtype(np.intp).itemsize:
        if arr.size == 0:
            return arr.astype(np.intp)
        maxval = arr.max()
        minval = arr.min()
        if maxval > np.iinfo(np.intp).max or minval < np.iinfo(np.intp).min:
            raise ValueError("Cannot deal with arrays with indices larger "
                             "than the machine maximum address size "
                             "(e.g. 64-bit indices on 32-bit machine).")
        return arr.astype(np.intp)
    return arr
def _slicetoarange(self, j, shape):
    """Given a slice object, use numpy arange to change it to a 1D array."""
    start, stop, step = j.indices(shape)
    return np.arange(start, stop, step)
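slice.indices clips the slice against the axis length before the arange is built, so for an axis of length 7:

# slice(1, None, 2).indices(7) == (1, 7, 2)
# -> np.arange(1, 7, 2) == array([1, 3, 5])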
def _check_ellipsis(self, index):
    """Process indices with Ellipsis. Returns modified index."""
    if index is Ellipsis:
        return (slice(None), slice(None))
    elif isinstance(index, tuple):
        # Find first ellipsis
        for j, v in enumerate(index):
            if v is Ellipsis:
                first_ellipsis = j
                break
        else:
            first_ellipsis = None

        # Expand the first one
        if first_ellipsis is not None:
            # Shortcuts
            if len(index) == 1:
                return (slice(None), slice(None))
            elif len(index) == 2:
                if first_ellipsis == 0:
                    if index[1] is Ellipsis:
                        return (slice(None), slice(None))
                    else:
                        return (slice(None), index[1])
                else:
                    return (index[0], slice(None))

            # General case
            tail = ()
            for v in index[first_ellipsis+1:]:
                if v is not Ellipsis:
                    tail = tail + (v,)
            nd = first_ellipsis + len(tail)
            nslice = max(0, 2 - nd)
            return index[:first_ellipsis] + (slice(None),)*nslice + tail

    return index
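A few concrete normalisations for the 2-D case the method targets; these follow directly from the branches above:

# Ellipsis          -> (slice(None), slice(None))
# (Ellipsis,)       -> (slice(None), slice(None))
# (Ellipsis, 1)     -> (slice(None), 1)
# (0, Ellipsis)     -> (0, slice(None))
# (0, Ellipsis, 1)  -> (0, 1)   # general case: nd == 2, so no padding slices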
def approx_jacobian(x, func, epsilon, *args):
    """
    Approximate the Jacobian matrix of a callable function.

    Parameters
    ----------
    x : array_like
        The state vector at which to compute the Jacobian matrix.
    func : callable f(x,*args)
        The vector-valued function.
    epsilon : float
        The perturbation used to determine the partial derivatives.
    args : sequence
        Additional arguments passed to func.

    Returns
    -------
    An array of dimensions ``(lenf, lenx)`` where ``lenf`` is the length
    of the outputs of `func`, and ``lenx`` is the number of elements in
    `x`.

    Notes
    -----
    The approximation is done using forward differences.
    """
    x0 = asfarray(x)
    f0 = atleast_1d(func(*((x0,) + args)))
    jac = zeros([len(x0), len(f0)])
    dx = zeros(len(x0))
    for i in range(len(x0)):
        dx[i] = epsilon
        jac[i] = (func(*((x0 + dx,) + args)) - f0) / epsilon
        dx[i] = 0.0
    return jac.transpose()
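A quick sanity check (hypothetical): for f(x, y) = (x*y, x + y) the exact Jacobian at (2, 3) is [[3, 2], [1, 1]], and the forward-difference estimate should agree to roughly the square root of machine epsilon:

import numpy as np

f = lambda p: np.array([p[0] * p[1], p[0] + p[1]])
J = approx_jacobian([2.0, 3.0], f, 1e-8)
# J is approximately [[3., 2.], [1., 1.]], shape (lenf, lenx) == (2, 2)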